blob: a239dd1d4c0a5e2e7a4cea93ce6c0bc3e6dd0972 [file] [log] [blame]
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001// Copyright (c) 2015-2016 The Khronos Group Inc.
2//
3// Permission is hereby granted, free of charge, to any person obtaining a
4// copy of this software and/or associated documentation files (the
5// "Materials"), to deal in the Materials without restriction, including
6// without limitation the rights to use, copy, modify, merge, publish,
7// distribute, sublicense, and/or sell copies of the Materials, and to
8// permit persons to whom the Materials are furnished to do so, subject to
9// the following conditions:
10//
11// The above copyright notice and this permission notice shall be included
12// in all copies or substantial portions of the Materials.
13//
14// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
17// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
18// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
19// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
20// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
21
22// This header is generated from the Khronos Vulkan XML API Registry.
23
24
25#ifndef VULKAN_HPP
26#define VULKAN_HPP
27
28#include <algorithm>
29#include <array>
30#include <cassert>
31#include <cstdint>
32#include <cstring>
33#include <initializer_list>
34#include <string>
35#include <system_error>
Mark Lobodzinski2d589822016-12-12 09:44:34 -070036#include <tuple>
Lenny Komowbed9b5c2016-08-11 11:23:15 -060037#include <type_traits>
38#include <vulkan/vulkan.h>
39#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
40# include <memory>
41# include <vector>
42#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
43
Mark Lobodzinskicbaa2cd2016-12-19 09:41:16 -070044static_assert( VK_HEADER_VERSION == 38 , "Wrong VK_HEADER_VERSION!" );
Lenny Komowbed9b5c2016-08-11 11:23:15 -060045
46// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
47// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
Endre Oma5d2c7ec2016-09-01 17:56:41 +020048#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060049#define VULKAN_HPP_TYPESAFE_CONVERSION 1
50#endif
51
52#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
53# if defined(__clang__)
54# if __has_feature(cxx_unrestricted_unions)
55# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
56# endif
57# elif defined(__GNUC__)
Lenny Komow6501c122016-08-31 15:03:49 -060058# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060059# if 40600 <= GCC_VERSION
60# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
61# endif
62# elif defined(_MSC_VER)
63# if 1900 <= _MSC_VER
64# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
65# endif
66# endif
67#endif
68
Mark Lobodzinski2d589822016-12-12 09:44:34 -070069
70#if !defined(VULKAN_HPP_INLINE)
71# if defined(__clang___)
72# if __has_attribute(always_inline)
73# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
74# else
75# define VULKAN_HPP_INLINE inline
76# endif
77# elif defined(__GNUC__)
78# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
79# elif defined(_MSC_VER)
80# define VULKAN_HPP_INLINE __forceinline
81# else
82# define VULKAN_HPP_INLINE inline
83# endif
84#endif
85
Lenny Komowbed9b5c2016-08-11 11:23:15 -060086namespace vk
87{
Mark Lobodzinski2d589822016-12-12 09:44:34 -070088 template <typename FlagBitsType> struct FlagTraits
89 {
90 enum { allFlags = 0 };
91 };
92
Lenny Komowbed9b5c2016-08-11 11:23:15 -060093 template <typename BitType, typename MaskType = VkFlags>
94 class Flags
95 {
96 public:
97 Flags()
98 : m_mask(0)
99 {
100 }
101
102 Flags(BitType bit)
103 : m_mask(static_cast<MaskType>(bit))
104 {
105 }
106
107 Flags(Flags<BitType> const& rhs)
108 : m_mask(rhs.m_mask)
109 {
110 }
111
112 Flags<BitType> & operator=(Flags<BitType> const& rhs)
113 {
114 m_mask = rhs.m_mask;
115 return *this;
116 }
117
118 Flags<BitType> & operator|=(Flags<BitType> const& rhs)
119 {
120 m_mask |= rhs.m_mask;
121 return *this;
122 }
123
124 Flags<BitType> & operator&=(Flags<BitType> const& rhs)
125 {
126 m_mask &= rhs.m_mask;
127 return *this;
128 }
129
130 Flags<BitType> & operator^=(Flags<BitType> const& rhs)
131 {
132 m_mask ^= rhs.m_mask;
133 return *this;
134 }
135
136 Flags<BitType> operator|(Flags<BitType> const& rhs) const
137 {
138 Flags<BitType> result(*this);
139 result |= rhs;
140 return result;
141 }
142
143 Flags<BitType> operator&(Flags<BitType> const& rhs) const
144 {
145 Flags<BitType> result(*this);
146 result &= rhs;
147 return result;
148 }
149
150 Flags<BitType> operator^(Flags<BitType> const& rhs) const
151 {
152 Flags<BitType> result(*this);
153 result ^= rhs;
154 return result;
155 }
156
157 bool operator!() const
158 {
159 return !m_mask;
160 }
161
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700162 Flags<BitType> operator~() const
163 {
164 Flags<BitType> result(*this);
165 result.m_mask ^= FlagTraits<BitType>::allFlags;
166 return result;
167 }
168
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600169 bool operator==(Flags<BitType> const& rhs) const
170 {
171 return m_mask == rhs.m_mask;
172 }
173
174 bool operator!=(Flags<BitType> const& rhs) const
175 {
176 return m_mask != rhs.m_mask;
177 }
178
179 explicit operator bool() const
180 {
181 return !!m_mask;
182 }
183
184 explicit operator MaskType() const
185 {
186 return m_mask;
187 }
188
189 private:
190 MaskType m_mask;
191 };
192
193 template <typename BitType>
194 Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags)
195 {
196 return flags | bit;
197 }
198
199 template <typename BitType>
200 Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags)
201 {
202 return flags & bit;
203 }
204
205 template <typename BitType>
206 Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags)
207 {
208 return flags ^ bit;
209 }
210
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700211
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600212 template <typename RefType>
213 class Optional
214 {
215 public:
216 Optional(RefType & reference) { m_ptr = &reference; }
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700217 Optional(RefType * ptr) { m_ptr = ptr; }
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600218 Optional(std::nullptr_t) { m_ptr = nullptr; }
219
220 operator RefType*() const { return m_ptr; }
221 RefType const* operator->() const { return m_ptr; }
222 explicit operator bool() const { return !!m_ptr; }
223
224 private:
225 RefType *m_ptr;
226 };
227
228#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
229 template <typename T>
230 class ArrayProxy
231 {
232 public:
233 ArrayProxy(std::nullptr_t)
234 : m_count(0)
235 , m_ptr(nullptr)
236 {}
237
238 ArrayProxy(T & ptr)
239 : m_count(1)
240 , m_ptr(&ptr)
241 {}
242
243 ArrayProxy(uint32_t count, T * ptr)
244 : m_count(count)
245 , m_ptr(ptr)
246 {}
247
248 template <size_t N>
249 ArrayProxy(std::array<typename std::remove_const<T>::type, N> & data)
250 : m_count(N)
251 , m_ptr(data.data())
252 {}
253
254 template <size_t N>
255 ArrayProxy(std::array<typename std::remove_const<T>::type, N> const& data)
256 : m_count(N)
257 , m_ptr(data.data())
258 {}
259
260 template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
261 ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> & data)
262 : m_count(static_cast<uint32_t>(data.size()))
263 , m_ptr(data.data())
264 {}
265
266 template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
267 ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> const& data)
268 : m_count(static_cast<uint32_t>(data.size()))
269 , m_ptr(data.data())
270 {}
271
272 ArrayProxy(std::initializer_list<T> const& data)
273 : m_count(static_cast<uint32_t>(data.end() - data.begin()))
274 , m_ptr(data.begin())
275 {}
276
277 const T * begin() const
278 {
279 return m_ptr;
280 }
281
282 const T * end() const
283 {
284 return m_ptr + m_count;
285 }
286
287 const T & front() const
288 {
289 assert(m_count && m_ptr);
290 return *m_ptr;
291 }
292
293 const T & back() const
294 {
295 assert(m_count && m_ptr);
296 return *(m_ptr + m_count - 1);
297 }
298
299 bool empty() const
300 {
301 return (m_count == 0);
302 }
303
304 uint32_t size() const
305 {
306 return m_count;
307 }
308
309 T * data() const
310 {
311 return m_ptr;
312 }
313
314 private:
315 uint32_t m_count;
316 T * m_ptr;
317 };
318#endif
319
320 enum class Result
321 {
322 eSuccess = VK_SUCCESS,
323 eNotReady = VK_NOT_READY,
324 eTimeout = VK_TIMEOUT,
325 eEventSet = VK_EVENT_SET,
326 eEventReset = VK_EVENT_RESET,
327 eIncomplete = VK_INCOMPLETE,
328 eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
329 eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
330 eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
331 eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
332 eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
333 eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
334 eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
335 eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
336 eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
337 eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
338 eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
Lenny Komowebf33162016-08-26 14:10:08 -0600339 eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600340 eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
341 eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
342 eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
343 eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
344 eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
345 eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
346 eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV
347 };
348
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700349 VULKAN_HPP_INLINE std::string to_string(Result value)
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600350 {
351 switch (value)
352 {
353 case Result::eSuccess: return "Success";
354 case Result::eNotReady: return "NotReady";
355 case Result::eTimeout: return "Timeout";
356 case Result::eEventSet: return "EventSet";
357 case Result::eEventReset: return "EventReset";
358 case Result::eIncomplete: return "Incomplete";
359 case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
360 case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
361 case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
362 case Result::eErrorDeviceLost: return "ErrorDeviceLost";
363 case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
364 case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
365 case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
366 case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
367 case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
368 case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
369 case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
Lenny Komowebf33162016-08-26 14:10:08 -0600370 case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600371 case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
372 case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
373 case Result::eSuboptimalKHR: return "SuboptimalKHR";
374 case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
375 case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
376 case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
377 case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
378 default: return "invalid";
379 }
380 }
381
382#if defined(_MSC_VER) && (_MSC_VER == 1800)
383# define noexcept _NOEXCEPT
384#endif
385
386 class ErrorCategoryImpl : public std::error_category
387 {
388 public:
389 virtual const char* name() const noexcept override { return "vk::Result"; }
390 virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
391 };
392
393#if defined(_MSC_VER) && (_MSC_VER == 1800)
394# undef noexcept
395#endif
396
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700397 VULKAN_HPP_INLINE const std::error_category& errorCategory()
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600398 {
399 static ErrorCategoryImpl instance;
400 return instance;
401 }
402
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700403 VULKAN_HPP_INLINE std::error_code make_error_code(Result e)
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600404 {
405 return std::error_code(static_cast<int>(e), errorCategory());
406 }
407
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700408 VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e)
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600409 {
410 return std::error_condition(static_cast<int>(e), errorCategory());
411 }
412
413} // namespace vk
414
415namespace std
416{
417 template <>
418 struct is_error_code_enum<vk::Result> : public true_type
419 {};
420}
421
422namespace vk
423{
424 template <typename T>
425 struct ResultValue
426 {
427 ResultValue( Result r, T & v )
428 : result( r )
429 , value( v )
430 {}
431
432 Result result;
433 T value;
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700434
435 operator std::tuple<Result&, T&>() { return std::tuple<Result&, T&>(result, value); }
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600436 };
437
438 template <typename T>
439 struct ResultValueType
440 {
441#ifdef VULKAN_HPP_NO_EXCEPTIONS
442 typedef ResultValue<T> type;
443#else
444 typedef T type;
445#endif
446 };
447
448 template <> struct ResultValueType<void>
449 {
450#ifdef VULKAN_HPP_NO_EXCEPTIONS
451 typedef Result type;
452#else
453 typedef void type;
454#endif
455 };
456
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700457 VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600458 {
459#ifdef VULKAN_HPP_NO_EXCEPTIONS
460 assert( result == Result::eSuccess );
461 return result;
462#else
463 if ( result != Result::eSuccess )
464 {
465 throw std::system_error( result, message );
466 }
467#endif
468 }
469
470 template <typename T>
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700471 VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600472 {
473#ifdef VULKAN_HPP_NO_EXCEPTIONS
474 assert( result == Result::eSuccess );
475 return ResultValue<T>( result, data );
476#else
477 if ( result != Result::eSuccess )
478 {
479 throw std::system_error( result, message );
480 }
481 return data;
482#endif
483 }
484
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700485 VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600486 {
487#ifdef VULKAN_HPP_NO_EXCEPTIONS
488 assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
489#else
490 if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
491 {
492 throw std::system_error( result, message );
493 }
494#endif
495 return result;
496 }
497
498 template <typename T>
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700499 VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600500 {
501#ifdef VULKAN_HPP_NO_EXCEPTIONS
502 assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
503#else
504 if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
505 {
506 throw std::system_error( result, message );
507 }
508#endif
509 return ResultValue<T>( result, data );
510 }
511
512 using SampleMask = uint32_t;
513
514 using Bool32 = uint32_t;
515
516 using DeviceSize = uint64_t;
517
518 enum class FramebufferCreateFlagBits
519 {
520 };
521
522 using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
523
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700524 VULKAN_HPP_INLINE FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600525 {
526 return FramebufferCreateFlags( bit0 ) | bit1;
527 }
528
529 enum class QueryPoolCreateFlagBits
530 {
531 };
532
533 using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
534
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700535 VULKAN_HPP_INLINE QueryPoolCreateFlags operator|( QueryPoolCreateFlagBits bit0, QueryPoolCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600536 {
537 return QueryPoolCreateFlags( bit0 ) | bit1;
538 }
539
540 enum class RenderPassCreateFlagBits
541 {
542 };
543
544 using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
545
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700546 VULKAN_HPP_INLINE RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600547 {
548 return RenderPassCreateFlags( bit0 ) | bit1;
549 }
550
551 enum class SamplerCreateFlagBits
552 {
553 };
554
555 using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
556
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700557 VULKAN_HPP_INLINE SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600558 {
559 return SamplerCreateFlags( bit0 ) | bit1;
560 }
561
562 enum class PipelineLayoutCreateFlagBits
563 {
564 };
565
566 using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
567
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700568 VULKAN_HPP_INLINE PipelineLayoutCreateFlags operator|( PipelineLayoutCreateFlagBits bit0, PipelineLayoutCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600569 {
570 return PipelineLayoutCreateFlags( bit0 ) | bit1;
571 }
572
573 enum class PipelineCacheCreateFlagBits
574 {
575 };
576
577 using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
578
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700579 VULKAN_HPP_INLINE PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600580 {
581 return PipelineCacheCreateFlags( bit0 ) | bit1;
582 }
583
584 enum class PipelineDepthStencilStateCreateFlagBits
585 {
586 };
587
588 using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
589
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700590 VULKAN_HPP_INLINE PipelineDepthStencilStateCreateFlags operator|( PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600591 {
592 return PipelineDepthStencilStateCreateFlags( bit0 ) | bit1;
593 }
594
595 enum class PipelineDynamicStateCreateFlagBits
596 {
597 };
598
599 using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
600
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700601 VULKAN_HPP_INLINE PipelineDynamicStateCreateFlags operator|( PipelineDynamicStateCreateFlagBits bit0, PipelineDynamicStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600602 {
603 return PipelineDynamicStateCreateFlags( bit0 ) | bit1;
604 }
605
606 enum class PipelineColorBlendStateCreateFlagBits
607 {
608 };
609
610 using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
611
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700612 VULKAN_HPP_INLINE PipelineColorBlendStateCreateFlags operator|( PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600613 {
614 return PipelineColorBlendStateCreateFlags( bit0 ) | bit1;
615 }
616
617 enum class PipelineMultisampleStateCreateFlagBits
618 {
619 };
620
621 using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
622
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700623 VULKAN_HPP_INLINE PipelineMultisampleStateCreateFlags operator|( PipelineMultisampleStateCreateFlagBits bit0, PipelineMultisampleStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600624 {
625 return PipelineMultisampleStateCreateFlags( bit0 ) | bit1;
626 }
627
628 enum class PipelineRasterizationStateCreateFlagBits
629 {
630 };
631
632 using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
633
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700634 VULKAN_HPP_INLINE PipelineRasterizationStateCreateFlags operator|( PipelineRasterizationStateCreateFlagBits bit0, PipelineRasterizationStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600635 {
636 return PipelineRasterizationStateCreateFlags( bit0 ) | bit1;
637 }
638
639 enum class PipelineViewportStateCreateFlagBits
640 {
641 };
642
643 using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
644
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700645 VULKAN_HPP_INLINE PipelineViewportStateCreateFlags operator|( PipelineViewportStateCreateFlagBits bit0, PipelineViewportStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600646 {
647 return PipelineViewportStateCreateFlags( bit0 ) | bit1;
648 }
649
650 enum class PipelineTessellationStateCreateFlagBits
651 {
652 };
653
654 using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
655
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700656 VULKAN_HPP_INLINE PipelineTessellationStateCreateFlags operator|( PipelineTessellationStateCreateFlagBits bit0, PipelineTessellationStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600657 {
658 return PipelineTessellationStateCreateFlags( bit0 ) | bit1;
659 }
660
661 enum class PipelineInputAssemblyStateCreateFlagBits
662 {
663 };
664
665 using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
666
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700667 VULKAN_HPP_INLINE PipelineInputAssemblyStateCreateFlags operator|( PipelineInputAssemblyStateCreateFlagBits bit0, PipelineInputAssemblyStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600668 {
669 return PipelineInputAssemblyStateCreateFlags( bit0 ) | bit1;
670 }
671
672 enum class PipelineVertexInputStateCreateFlagBits
673 {
674 };
675
676 using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
677
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700678 VULKAN_HPP_INLINE PipelineVertexInputStateCreateFlags operator|( PipelineVertexInputStateCreateFlagBits bit0, PipelineVertexInputStateCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600679 {
680 return PipelineVertexInputStateCreateFlags( bit0 ) | bit1;
681 }
682
683 enum class PipelineShaderStageCreateFlagBits
684 {
685 };
686
687 using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
688
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700689 VULKAN_HPP_INLINE PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600690 {
691 return PipelineShaderStageCreateFlags( bit0 ) | bit1;
692 }
693
694 enum class DescriptorSetLayoutCreateFlagBits
695 {
696 };
697
698 using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
699
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700700 VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600701 {
702 return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
703 }
704
705 enum class BufferViewCreateFlagBits
706 {
707 };
708
709 using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
710
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700711 VULKAN_HPP_INLINE BufferViewCreateFlags operator|( BufferViewCreateFlagBits bit0, BufferViewCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600712 {
713 return BufferViewCreateFlags( bit0 ) | bit1;
714 }
715
716 enum class InstanceCreateFlagBits
717 {
718 };
719
720 using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
721
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700722 VULKAN_HPP_INLINE InstanceCreateFlags operator|( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600723 {
724 return InstanceCreateFlags( bit0 ) | bit1;
725 }
726
727 enum class DeviceCreateFlagBits
728 {
729 };
730
731 using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
732
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700733 VULKAN_HPP_INLINE DeviceCreateFlags operator|( DeviceCreateFlagBits bit0, DeviceCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600734 {
735 return DeviceCreateFlags( bit0 ) | bit1;
736 }
737
738 enum class DeviceQueueCreateFlagBits
739 {
740 };
741
742 using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
743
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700744 VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600745 {
746 return DeviceQueueCreateFlags( bit0 ) | bit1;
747 }
748
749 enum class ImageViewCreateFlagBits
750 {
751 };
752
753 using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
754
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700755 VULKAN_HPP_INLINE ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600756 {
757 return ImageViewCreateFlags( bit0 ) | bit1;
758 }
759
760 enum class SemaphoreCreateFlagBits
761 {
762 };
763
764 using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
765
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700766 VULKAN_HPP_INLINE SemaphoreCreateFlags operator|( SemaphoreCreateFlagBits bit0, SemaphoreCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600767 {
768 return SemaphoreCreateFlags( bit0 ) | bit1;
769 }
770
771 enum class ShaderModuleCreateFlagBits
772 {
773 };
774
775 using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
776
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700777 VULKAN_HPP_INLINE ShaderModuleCreateFlags operator|( ShaderModuleCreateFlagBits bit0, ShaderModuleCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600778 {
779 return ShaderModuleCreateFlags( bit0 ) | bit1;
780 }
781
782 enum class EventCreateFlagBits
783 {
784 };
785
786 using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
787
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700788 VULKAN_HPP_INLINE EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600789 {
790 return EventCreateFlags( bit0 ) | bit1;
791 }
792
793 enum class MemoryMapFlagBits
794 {
795 };
796
797 using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
798
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700799 VULKAN_HPP_INLINE MemoryMapFlags operator|( MemoryMapFlagBits bit0, MemoryMapFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600800 {
801 return MemoryMapFlags( bit0 ) | bit1;
802 }
803
804 enum class SubpassDescriptionFlagBits
805 {
806 };
807
808 using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
809
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700810 VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600811 {
812 return SubpassDescriptionFlags( bit0 ) | bit1;
813 }
814
815 enum class DescriptorPoolResetFlagBits
816 {
817 };
818
819 using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
820
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700821 VULKAN_HPP_INLINE DescriptorPoolResetFlags operator|( DescriptorPoolResetFlagBits bit0, DescriptorPoolResetFlagBits bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600822 {
823 return DescriptorPoolResetFlags( bit0 ) | bit1;
824 }
825
826 enum class SwapchainCreateFlagBitsKHR
827 {
828 };
829
830 using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
831
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700832 VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600833 {
834 return SwapchainCreateFlagsKHR( bit0 ) | bit1;
835 }
836
837 enum class DisplayModeCreateFlagBitsKHR
838 {
839 };
840
841 using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
842
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700843 VULKAN_HPP_INLINE DisplayModeCreateFlagsKHR operator|( DisplayModeCreateFlagBitsKHR bit0, DisplayModeCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600844 {
845 return DisplayModeCreateFlagsKHR( bit0 ) | bit1;
846 }
847
848 enum class DisplaySurfaceCreateFlagBitsKHR
849 {
850 };
851
852 using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
853
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700854 VULKAN_HPP_INLINE DisplaySurfaceCreateFlagsKHR operator|( DisplaySurfaceCreateFlagBitsKHR bit0, DisplaySurfaceCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600855 {
856 return DisplaySurfaceCreateFlagsKHR( bit0 ) | bit1;
857 }
858
859#ifdef VK_USE_PLATFORM_ANDROID_KHR
860 enum class AndroidSurfaceCreateFlagBitsKHR
861 {
862 };
863#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
864
865#ifdef VK_USE_PLATFORM_ANDROID_KHR
866 using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
867
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700868 VULKAN_HPP_INLINE AndroidSurfaceCreateFlagsKHR operator|( AndroidSurfaceCreateFlagBitsKHR bit0, AndroidSurfaceCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600869 {
870 return AndroidSurfaceCreateFlagsKHR( bit0 ) | bit1;
871 }
872#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
873
874#ifdef VK_USE_PLATFORM_MIR_KHR
875 enum class MirSurfaceCreateFlagBitsKHR
876 {
877 };
878#endif /*VK_USE_PLATFORM_MIR_KHR*/
879
880#ifdef VK_USE_PLATFORM_MIR_KHR
881 using MirSurfaceCreateFlagsKHR = Flags<MirSurfaceCreateFlagBitsKHR, VkMirSurfaceCreateFlagsKHR>;
882
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700883 VULKAN_HPP_INLINE MirSurfaceCreateFlagsKHR operator|( MirSurfaceCreateFlagBitsKHR bit0, MirSurfaceCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600884 {
885 return MirSurfaceCreateFlagsKHR( bit0 ) | bit1;
886 }
887#endif /*VK_USE_PLATFORM_MIR_KHR*/
888
889#ifdef VK_USE_PLATFORM_WAYLAND_KHR
890 enum class WaylandSurfaceCreateFlagBitsKHR
891 {
892 };
893#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
894
895#ifdef VK_USE_PLATFORM_WAYLAND_KHR
896 using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
897
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700898 VULKAN_HPP_INLINE WaylandSurfaceCreateFlagsKHR operator|( WaylandSurfaceCreateFlagBitsKHR bit0, WaylandSurfaceCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600899 {
900 return WaylandSurfaceCreateFlagsKHR( bit0 ) | bit1;
901 }
902#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
903
904#ifdef VK_USE_PLATFORM_WIN32_KHR
905 enum class Win32SurfaceCreateFlagBitsKHR
906 {
907 };
908#endif /*VK_USE_PLATFORM_WIN32_KHR*/
909
910#ifdef VK_USE_PLATFORM_WIN32_KHR
911 using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
912
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700913 VULKAN_HPP_INLINE Win32SurfaceCreateFlagsKHR operator|( Win32SurfaceCreateFlagBitsKHR bit0, Win32SurfaceCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600914 {
915 return Win32SurfaceCreateFlagsKHR( bit0 ) | bit1;
916 }
917#endif /*VK_USE_PLATFORM_WIN32_KHR*/
918
919#ifdef VK_USE_PLATFORM_XLIB_KHR
920 enum class XlibSurfaceCreateFlagBitsKHR
921 {
922 };
923#endif /*VK_USE_PLATFORM_XLIB_KHR*/
924
925#ifdef VK_USE_PLATFORM_XLIB_KHR
926 using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
927
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700928 VULKAN_HPP_INLINE XlibSurfaceCreateFlagsKHR operator|( XlibSurfaceCreateFlagBitsKHR bit0, XlibSurfaceCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600929 {
930 return XlibSurfaceCreateFlagsKHR( bit0 ) | bit1;
931 }
932#endif /*VK_USE_PLATFORM_XLIB_KHR*/
933
934#ifdef VK_USE_PLATFORM_XCB_KHR
935 enum class XcbSurfaceCreateFlagBitsKHR
936 {
937 };
938#endif /*VK_USE_PLATFORM_XCB_KHR*/
939
940#ifdef VK_USE_PLATFORM_XCB_KHR
941 using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
942
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700943 VULKAN_HPP_INLINE XcbSurfaceCreateFlagsKHR operator|( XcbSurfaceCreateFlagBitsKHR bit0, XcbSurfaceCreateFlagBitsKHR bit1 )
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600944 {
945 return XcbSurfaceCreateFlagsKHR( bit0 ) | bit1;
946 }
947#endif /*VK_USE_PLATFORM_XCB_KHR*/
948
949 class DeviceMemory
950 {
951 public:
952 DeviceMemory()
953 : m_deviceMemory(VK_NULL_HANDLE)
954 {}
955
956#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
957 DeviceMemory(VkDeviceMemory deviceMemory)
958 : m_deviceMemory(deviceMemory)
959 {}
960
961 DeviceMemory& operator=(VkDeviceMemory deviceMemory)
962 {
963 m_deviceMemory = deviceMemory;
964 return *this;
965 }
966#endif
967
Lenny Komowebf33162016-08-26 14:10:08 -0600968 bool operator==(DeviceMemory const &rhs) const
969 {
970 return m_deviceMemory == rhs.m_deviceMemory;
971 }
972
973 bool operator!=(DeviceMemory const &rhs) const
974 {
975 return m_deviceMemory != rhs.m_deviceMemory;
976 }
977
978 bool operator<(DeviceMemory const &rhs) const
979 {
980 return m_deviceMemory < rhs.m_deviceMemory;
981 }
982
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600983#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
984 explicit
985#endif
986 operator VkDeviceMemory() const
987 {
988 return m_deviceMemory;
989 }
990
991 explicit operator bool() const
992 {
993 return m_deviceMemory != VK_NULL_HANDLE;
994 }
995
996 bool operator!() const
997 {
998 return m_deviceMemory == VK_NULL_HANDLE;
999 }
1000
1001 private:
1002 VkDeviceMemory m_deviceMemory;
1003 };
1004 static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
1005
1006 class CommandPool
1007 {
1008 public:
1009 CommandPool()
1010 : m_commandPool(VK_NULL_HANDLE)
1011 {}
1012
1013#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1014 CommandPool(VkCommandPool commandPool)
1015 : m_commandPool(commandPool)
1016 {}
1017
1018 CommandPool& operator=(VkCommandPool commandPool)
1019 {
1020 m_commandPool = commandPool;
1021 return *this;
1022 }
1023#endif
1024
Lenny Komowebf33162016-08-26 14:10:08 -06001025 bool operator==(CommandPool const &rhs) const
1026 {
1027 return m_commandPool == rhs.m_commandPool;
1028 }
1029
1030 bool operator!=(CommandPool const &rhs) const
1031 {
1032 return m_commandPool != rhs.m_commandPool;
1033 }
1034
1035 bool operator<(CommandPool const &rhs) const
1036 {
1037 return m_commandPool < rhs.m_commandPool;
1038 }
1039
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001040#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1041 explicit
1042#endif
1043 operator VkCommandPool() const
1044 {
1045 return m_commandPool;
1046 }
1047
1048 explicit operator bool() const
1049 {
1050 return m_commandPool != VK_NULL_HANDLE;
1051 }
1052
1053 bool operator!() const
1054 {
1055 return m_commandPool == VK_NULL_HANDLE;
1056 }
1057
1058 private:
1059 VkCommandPool m_commandPool;
1060 };
1061 static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
1062
1063 class Buffer
1064 {
1065 public:
1066 Buffer()
1067 : m_buffer(VK_NULL_HANDLE)
1068 {}
1069
1070#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1071 Buffer(VkBuffer buffer)
1072 : m_buffer(buffer)
1073 {}
1074
1075 Buffer& operator=(VkBuffer buffer)
1076 {
1077 m_buffer = buffer;
1078 return *this;
1079 }
1080#endif
1081
Lenny Komowebf33162016-08-26 14:10:08 -06001082 bool operator==(Buffer const &rhs) const
1083 {
1084 return m_buffer == rhs.m_buffer;
1085 }
1086
1087 bool operator!=(Buffer const &rhs) const
1088 {
1089 return m_buffer != rhs.m_buffer;
1090 }
1091
1092 bool operator<(Buffer const &rhs) const
1093 {
1094 return m_buffer < rhs.m_buffer;
1095 }
1096
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001097#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1098 explicit
1099#endif
1100 operator VkBuffer() const
1101 {
1102 return m_buffer;
1103 }
1104
1105 explicit operator bool() const
1106 {
1107 return m_buffer != VK_NULL_HANDLE;
1108 }
1109
1110 bool operator!() const
1111 {
1112 return m_buffer == VK_NULL_HANDLE;
1113 }
1114
1115 private:
1116 VkBuffer m_buffer;
1117 };
1118 static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
1119
1120 class BufferView
1121 {
1122 public:
1123 BufferView()
1124 : m_bufferView(VK_NULL_HANDLE)
1125 {}
1126
1127#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1128 BufferView(VkBufferView bufferView)
1129 : m_bufferView(bufferView)
1130 {}
1131
1132 BufferView& operator=(VkBufferView bufferView)
1133 {
1134 m_bufferView = bufferView;
1135 return *this;
1136 }
1137#endif
1138
Lenny Komowebf33162016-08-26 14:10:08 -06001139 bool operator==(BufferView const &rhs) const
1140 {
1141 return m_bufferView == rhs.m_bufferView;
1142 }
1143
1144 bool operator!=(BufferView const &rhs) const
1145 {
1146 return m_bufferView != rhs.m_bufferView;
1147 }
1148
1149 bool operator<(BufferView const &rhs) const
1150 {
1151 return m_bufferView < rhs.m_bufferView;
1152 }
1153
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001154#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1155 explicit
1156#endif
1157 operator VkBufferView() const
1158 {
1159 return m_bufferView;
1160 }
1161
1162 explicit operator bool() const
1163 {
1164 return m_bufferView != VK_NULL_HANDLE;
1165 }
1166
1167 bool operator!() const
1168 {
1169 return m_bufferView == VK_NULL_HANDLE;
1170 }
1171
1172 private:
1173 VkBufferView m_bufferView;
1174 };
1175 static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
1176
1177 class Image
1178 {
1179 public:
1180 Image()
1181 : m_image(VK_NULL_HANDLE)
1182 {}
1183
1184#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1185 Image(VkImage image)
1186 : m_image(image)
1187 {}
1188
1189 Image& operator=(VkImage image)
1190 {
1191 m_image = image;
1192 return *this;
1193 }
1194#endif
1195
Lenny Komowebf33162016-08-26 14:10:08 -06001196 bool operator==(Image const &rhs) const
1197 {
1198 return m_image == rhs.m_image;
1199 }
1200
1201 bool operator!=(Image const &rhs) const
1202 {
1203 return m_image != rhs.m_image;
1204 }
1205
1206 bool operator<(Image const &rhs) const
1207 {
1208 return m_image < rhs.m_image;
1209 }
1210
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001211#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1212 explicit
1213#endif
1214 operator VkImage() const
1215 {
1216 return m_image;
1217 }
1218
1219 explicit operator bool() const
1220 {
1221 return m_image != VK_NULL_HANDLE;
1222 }
1223
1224 bool operator!() const
1225 {
1226 return m_image == VK_NULL_HANDLE;
1227 }
1228
1229 private:
1230 VkImage m_image;
1231 };
1232 static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
1233
1234 class ImageView
1235 {
1236 public:
1237 ImageView()
1238 : m_imageView(VK_NULL_HANDLE)
1239 {}
1240
1241#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1242 ImageView(VkImageView imageView)
1243 : m_imageView(imageView)
1244 {}
1245
1246 ImageView& operator=(VkImageView imageView)
1247 {
1248 m_imageView = imageView;
1249 return *this;
1250 }
1251#endif
1252
Lenny Komowebf33162016-08-26 14:10:08 -06001253 bool operator==(ImageView const &rhs) const
1254 {
1255 return m_imageView == rhs.m_imageView;
1256 }
1257
1258 bool operator!=(ImageView const &rhs) const
1259 {
1260 return m_imageView != rhs.m_imageView;
1261 }
1262
1263 bool operator<(ImageView const &rhs) const
1264 {
1265 return m_imageView < rhs.m_imageView;
1266 }
1267
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001268#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1269 explicit
1270#endif
1271 operator VkImageView() const
1272 {
1273 return m_imageView;
1274 }
1275
1276 explicit operator bool() const
1277 {
1278 return m_imageView != VK_NULL_HANDLE;
1279 }
1280
1281 bool operator!() const
1282 {
1283 return m_imageView == VK_NULL_HANDLE;
1284 }
1285
1286 private:
1287 VkImageView m_imageView;
1288 };
1289 static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
1290
1291 class ShaderModule
1292 {
1293 public:
1294 ShaderModule()
1295 : m_shaderModule(VK_NULL_HANDLE)
1296 {}
1297
1298#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1299 ShaderModule(VkShaderModule shaderModule)
1300 : m_shaderModule(shaderModule)
1301 {}
1302
1303 ShaderModule& operator=(VkShaderModule shaderModule)
1304 {
1305 m_shaderModule = shaderModule;
1306 return *this;
1307 }
1308#endif
1309
Lenny Komowebf33162016-08-26 14:10:08 -06001310 bool operator==(ShaderModule const &rhs) const
1311 {
1312 return m_shaderModule == rhs.m_shaderModule;
1313 }
1314
1315 bool operator!=(ShaderModule const &rhs) const
1316 {
1317 return m_shaderModule != rhs.m_shaderModule;
1318 }
1319
1320 bool operator<(ShaderModule const &rhs) const
1321 {
1322 return m_shaderModule < rhs.m_shaderModule;
1323 }
1324
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001325#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1326 explicit
1327#endif
1328 operator VkShaderModule() const
1329 {
1330 return m_shaderModule;
1331 }
1332
1333 explicit operator bool() const
1334 {
1335 return m_shaderModule != VK_NULL_HANDLE;
1336 }
1337
1338 bool operator!() const
1339 {
1340 return m_shaderModule == VK_NULL_HANDLE;
1341 }
1342
1343 private:
1344 VkShaderModule m_shaderModule;
1345 };
1346 static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
1347
1348 class Pipeline
1349 {
1350 public:
1351 Pipeline()
1352 : m_pipeline(VK_NULL_HANDLE)
1353 {}
1354
1355#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1356 Pipeline(VkPipeline pipeline)
1357 : m_pipeline(pipeline)
1358 {}
1359
1360 Pipeline& operator=(VkPipeline pipeline)
1361 {
1362 m_pipeline = pipeline;
1363 return *this;
1364 }
1365#endif
1366
Lenny Komowebf33162016-08-26 14:10:08 -06001367 bool operator==(Pipeline const &rhs) const
1368 {
1369 return m_pipeline == rhs.m_pipeline;
1370 }
1371
1372 bool operator!=(Pipeline const &rhs) const
1373 {
1374 return m_pipeline != rhs.m_pipeline;
1375 }
1376
1377 bool operator<(Pipeline const &rhs) const
1378 {
1379 return m_pipeline < rhs.m_pipeline;
1380 }
1381
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001382#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1383 explicit
1384#endif
1385 operator VkPipeline() const
1386 {
1387 return m_pipeline;
1388 }
1389
1390 explicit operator bool() const
1391 {
1392 return m_pipeline != VK_NULL_HANDLE;
1393 }
1394
1395 bool operator!() const
1396 {
1397 return m_pipeline == VK_NULL_HANDLE;
1398 }
1399
1400 private:
1401 VkPipeline m_pipeline;
1402 };
1403 static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
1404
1405 class PipelineLayout
1406 {
1407 public:
1408 PipelineLayout()
1409 : m_pipelineLayout(VK_NULL_HANDLE)
1410 {}
1411
1412#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1413 PipelineLayout(VkPipelineLayout pipelineLayout)
1414 : m_pipelineLayout(pipelineLayout)
1415 {}
1416
1417 PipelineLayout& operator=(VkPipelineLayout pipelineLayout)
1418 {
1419 m_pipelineLayout = pipelineLayout;
1420 return *this;
1421 }
1422#endif
1423
Lenny Komowebf33162016-08-26 14:10:08 -06001424 bool operator==(PipelineLayout const &rhs) const
1425 {
1426 return m_pipelineLayout == rhs.m_pipelineLayout;
1427 }
1428
1429 bool operator!=(PipelineLayout const &rhs) const
1430 {
1431 return m_pipelineLayout != rhs.m_pipelineLayout;
1432 }
1433
1434 bool operator<(PipelineLayout const &rhs) const
1435 {
1436 return m_pipelineLayout < rhs.m_pipelineLayout;
1437 }
1438
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001439#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1440 explicit
1441#endif
1442 operator VkPipelineLayout() const
1443 {
1444 return m_pipelineLayout;
1445 }
1446
1447 explicit operator bool() const
1448 {
1449 return m_pipelineLayout != VK_NULL_HANDLE;
1450 }
1451
1452 bool operator!() const
1453 {
1454 return m_pipelineLayout == VK_NULL_HANDLE;
1455 }
1456
1457 private:
1458 VkPipelineLayout m_pipelineLayout;
1459 };
1460 static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
1461
1462 class Sampler
1463 {
1464 public:
1465 Sampler()
1466 : m_sampler(VK_NULL_HANDLE)
1467 {}
1468
1469#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1470 Sampler(VkSampler sampler)
1471 : m_sampler(sampler)
1472 {}
1473
1474 Sampler& operator=(VkSampler sampler)
1475 {
1476 m_sampler = sampler;
1477 return *this;
1478 }
1479#endif
1480
Lenny Komowebf33162016-08-26 14:10:08 -06001481 bool operator==(Sampler const &rhs) const
1482 {
1483 return m_sampler == rhs.m_sampler;
1484 }
1485
1486 bool operator!=(Sampler const &rhs) const
1487 {
1488 return m_sampler != rhs.m_sampler;
1489 }
1490
1491 bool operator<(Sampler const &rhs) const
1492 {
1493 return m_sampler < rhs.m_sampler;
1494 }
1495
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001496#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1497 explicit
1498#endif
1499 operator VkSampler() const
1500 {
1501 return m_sampler;
1502 }
1503
1504 explicit operator bool() const
1505 {
1506 return m_sampler != VK_NULL_HANDLE;
1507 }
1508
1509 bool operator!() const
1510 {
1511 return m_sampler == VK_NULL_HANDLE;
1512 }
1513
1514 private:
1515 VkSampler m_sampler;
1516 };
1517 static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
1518
1519 class DescriptorSet
1520 {
1521 public:
1522 DescriptorSet()
1523 : m_descriptorSet(VK_NULL_HANDLE)
1524 {}
1525
1526#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1527 DescriptorSet(VkDescriptorSet descriptorSet)
1528 : m_descriptorSet(descriptorSet)
1529 {}
1530
1531 DescriptorSet& operator=(VkDescriptorSet descriptorSet)
1532 {
1533 m_descriptorSet = descriptorSet;
1534 return *this;
1535 }
1536#endif
1537
Lenny Komowebf33162016-08-26 14:10:08 -06001538 bool operator==(DescriptorSet const &rhs) const
1539 {
1540 return m_descriptorSet == rhs.m_descriptorSet;
1541 }
1542
1543 bool operator!=(DescriptorSet const &rhs) const
1544 {
1545 return m_descriptorSet != rhs.m_descriptorSet;
1546 }
1547
1548 bool operator<(DescriptorSet const &rhs) const
1549 {
1550 return m_descriptorSet < rhs.m_descriptorSet;
1551 }
1552
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001553#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1554 explicit
1555#endif
1556 operator VkDescriptorSet() const
1557 {
1558 return m_descriptorSet;
1559 }
1560
1561 explicit operator bool() const
1562 {
1563 return m_descriptorSet != VK_NULL_HANDLE;
1564 }
1565
1566 bool operator!() const
1567 {
1568 return m_descriptorSet == VK_NULL_HANDLE;
1569 }
1570
1571 private:
1572 VkDescriptorSet m_descriptorSet;
1573 };
1574 static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
1575
1576 class DescriptorSetLayout
1577 {
1578 public:
1579 DescriptorSetLayout()
1580 : m_descriptorSetLayout(VK_NULL_HANDLE)
1581 {}
1582
1583#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1584 DescriptorSetLayout(VkDescriptorSetLayout descriptorSetLayout)
1585 : m_descriptorSetLayout(descriptorSetLayout)
1586 {}
1587
1588 DescriptorSetLayout& operator=(VkDescriptorSetLayout descriptorSetLayout)
1589 {
1590 m_descriptorSetLayout = descriptorSetLayout;
1591 return *this;
1592 }
1593#endif
1594
Lenny Komowebf33162016-08-26 14:10:08 -06001595 bool operator==(DescriptorSetLayout const &rhs) const
1596 {
1597 return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
1598 }
1599
1600 bool operator!=(DescriptorSetLayout const &rhs) const
1601 {
1602 return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
1603 }
1604
1605 bool operator<(DescriptorSetLayout const &rhs) const
1606 {
1607 return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
1608 }
1609
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001610#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1611 explicit
1612#endif
1613 operator VkDescriptorSetLayout() const
1614 {
1615 return m_descriptorSetLayout;
1616 }
1617
1618 explicit operator bool() const
1619 {
1620 return m_descriptorSetLayout != VK_NULL_HANDLE;
1621 }
1622
1623 bool operator!() const
1624 {
1625 return m_descriptorSetLayout == VK_NULL_HANDLE;
1626 }
1627
1628 private:
1629 VkDescriptorSetLayout m_descriptorSetLayout;
1630 };
1631 static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
1632
1633 class DescriptorPool
1634 {
1635 public:
1636 DescriptorPool()
1637 : m_descriptorPool(VK_NULL_HANDLE)
1638 {}
1639
1640#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1641 DescriptorPool(VkDescriptorPool descriptorPool)
1642 : m_descriptorPool(descriptorPool)
1643 {}
1644
1645 DescriptorPool& operator=(VkDescriptorPool descriptorPool)
1646 {
1647 m_descriptorPool = descriptorPool;
1648 return *this;
1649 }
1650#endif
1651
Lenny Komowebf33162016-08-26 14:10:08 -06001652 bool operator==(DescriptorPool const &rhs) const
1653 {
1654 return m_descriptorPool == rhs.m_descriptorPool;
1655 }
1656
1657 bool operator!=(DescriptorPool const &rhs) const
1658 {
1659 return m_descriptorPool != rhs.m_descriptorPool;
1660 }
1661
1662 bool operator<(DescriptorPool const &rhs) const
1663 {
1664 return m_descriptorPool < rhs.m_descriptorPool;
1665 }
1666
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001667#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1668 explicit
1669#endif
1670 operator VkDescriptorPool() const
1671 {
1672 return m_descriptorPool;
1673 }
1674
1675 explicit operator bool() const
1676 {
1677 return m_descriptorPool != VK_NULL_HANDLE;
1678 }
1679
1680 bool operator!() const
1681 {
1682 return m_descriptorPool == VK_NULL_HANDLE;
1683 }
1684
1685 private:
1686 VkDescriptorPool m_descriptorPool;
1687 };
1688 static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
1689
1690 class Fence
1691 {
1692 public:
1693 Fence()
1694 : m_fence(VK_NULL_HANDLE)
1695 {}
1696
1697#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1698 Fence(VkFence fence)
1699 : m_fence(fence)
1700 {}
1701
1702 Fence& operator=(VkFence fence)
1703 {
1704 m_fence = fence;
1705 return *this;
1706 }
1707#endif
1708
Lenny Komowebf33162016-08-26 14:10:08 -06001709 bool operator==(Fence const &rhs) const
1710 {
1711 return m_fence == rhs.m_fence;
1712 }
1713
1714 bool operator!=(Fence const &rhs) const
1715 {
1716 return m_fence != rhs.m_fence;
1717 }
1718
1719 bool operator<(Fence const &rhs) const
1720 {
1721 return m_fence < rhs.m_fence;
1722 }
1723
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001724#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1725 explicit
1726#endif
1727 operator VkFence() const
1728 {
1729 return m_fence;
1730 }
1731
1732 explicit operator bool() const
1733 {
1734 return m_fence != VK_NULL_HANDLE;
1735 }
1736
1737 bool operator!() const
1738 {
1739 return m_fence == VK_NULL_HANDLE;
1740 }
1741
1742 private:
1743 VkFence m_fence;
1744 };
1745 static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
1746
1747 class Semaphore
1748 {
1749 public:
1750 Semaphore()
1751 : m_semaphore(VK_NULL_HANDLE)
1752 {}
1753
1754#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1755 Semaphore(VkSemaphore semaphore)
1756 : m_semaphore(semaphore)
1757 {}
1758
1759 Semaphore& operator=(VkSemaphore semaphore)
1760 {
1761 m_semaphore = semaphore;
1762 return *this;
1763 }
1764#endif
1765
Lenny Komowebf33162016-08-26 14:10:08 -06001766 bool operator==(Semaphore const &rhs) const
1767 {
1768 return m_semaphore == rhs.m_semaphore;
1769 }
1770
1771 bool operator!=(Semaphore const &rhs) const
1772 {
1773 return m_semaphore != rhs.m_semaphore;
1774 }
1775
1776 bool operator<(Semaphore const &rhs) const
1777 {
1778 return m_semaphore < rhs.m_semaphore;
1779 }
1780
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001781#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1782 explicit
1783#endif
1784 operator VkSemaphore() const
1785 {
1786 return m_semaphore;
1787 }
1788
1789 explicit operator bool() const
1790 {
1791 return m_semaphore != VK_NULL_HANDLE;
1792 }
1793
1794 bool operator!() const
1795 {
1796 return m_semaphore == VK_NULL_HANDLE;
1797 }
1798
1799 private:
1800 VkSemaphore m_semaphore;
1801 };
1802 static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
1803
1804 class Event
1805 {
1806 public:
1807 Event()
1808 : m_event(VK_NULL_HANDLE)
1809 {}
1810
1811#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1812 Event(VkEvent event)
1813 : m_event(event)
1814 {}
1815
1816 Event& operator=(VkEvent event)
1817 {
1818 m_event = event;
1819 return *this;
1820 }
1821#endif
1822
Lenny Komowebf33162016-08-26 14:10:08 -06001823 bool operator==(Event const &rhs) const
1824 {
1825 return m_event == rhs.m_event;
1826 }
1827
1828 bool operator!=(Event const &rhs) const
1829 {
1830 return m_event != rhs.m_event;
1831 }
1832
1833 bool operator<(Event const &rhs) const
1834 {
1835 return m_event < rhs.m_event;
1836 }
1837
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001838#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1839 explicit
1840#endif
1841 operator VkEvent() const
1842 {
1843 return m_event;
1844 }
1845
1846 explicit operator bool() const
1847 {
1848 return m_event != VK_NULL_HANDLE;
1849 }
1850
1851 bool operator!() const
1852 {
1853 return m_event == VK_NULL_HANDLE;
1854 }
1855
1856 private:
1857 VkEvent m_event;
1858 };
1859 static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
1860
1861 class QueryPool
1862 {
1863 public:
1864 QueryPool()
1865 : m_queryPool(VK_NULL_HANDLE)
1866 {}
1867
1868#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1869 QueryPool(VkQueryPool queryPool)
1870 : m_queryPool(queryPool)
1871 {}
1872
1873 QueryPool& operator=(VkQueryPool queryPool)
1874 {
1875 m_queryPool = queryPool;
1876 return *this;
1877 }
1878#endif
1879
Lenny Komowebf33162016-08-26 14:10:08 -06001880 bool operator==(QueryPool const &rhs) const
1881 {
1882 return m_queryPool == rhs.m_queryPool;
1883 }
1884
1885 bool operator!=(QueryPool const &rhs) const
1886 {
1887 return m_queryPool != rhs.m_queryPool;
1888 }
1889
1890 bool operator<(QueryPool const &rhs) const
1891 {
1892 return m_queryPool < rhs.m_queryPool;
1893 }
1894
Lenny Komowbed9b5c2016-08-11 11:23:15 -06001895#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1896 explicit
1897#endif
1898 operator VkQueryPool() const
1899 {
1900 return m_queryPool;
1901 }
1902
1903 explicit operator bool() const
1904 {
1905 return m_queryPool != VK_NULL_HANDLE;
1906 }
1907
1908 bool operator!() const
1909 {
1910 return m_queryPool == VK_NULL_HANDLE;
1911 }
1912
1913 private:
1914 VkQueryPool m_queryPool;
1915 };
1916 static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
1917
1918 class Framebuffer
1919 {
1920 public:
1921 Framebuffer()
1922 : m_framebuffer(VK_NULL_HANDLE)
1923 {}
1924
1925#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1926 Framebuffer(VkFramebuffer framebuffer)
1927 : m_framebuffer(framebuffer)
1928 {}
1929
1930 Framebuffer& operator=(VkFramebuffer framebuffer)
1931 {
1932 m_framebuffer = framebuffer;
1933 return *this;
1934 }
1935#endif
1936
1937    bool operator==(Framebuffer const &rhs) const
1938 {
1939 return m_framebuffer == rhs.m_framebuffer;
1940 }
1941
1942 bool operator!=(Framebuffer const &rhs) const
1943 {
1944 return m_framebuffer != rhs.m_framebuffer;
1945 }
1946
1947 bool operator<(Framebuffer const &rhs) const
1948 {
1949 return m_framebuffer < rhs.m_framebuffer;
1950 }
1951
1952#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1953 explicit
1954#endif
1955 operator VkFramebuffer() const
1956 {
1957 return m_framebuffer;
1958 }
1959
1960 explicit operator bool() const
1961 {
1962 return m_framebuffer != VK_NULL_HANDLE;
1963 }
1964
1965 bool operator!() const
1966 {
1967 return m_framebuffer == VK_NULL_HANDLE;
1968 }
1969
1970 private:
1971 VkFramebuffer m_framebuffer;
1972 };
1973 static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
1974
1975 class RenderPass
1976 {
1977 public:
1978 RenderPass()
1979 : m_renderPass(VK_NULL_HANDLE)
1980 {}
1981
1982#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1983 RenderPass(VkRenderPass renderPass)
1984 : m_renderPass(renderPass)
1985 {}
1986
1987 RenderPass& operator=(VkRenderPass renderPass)
1988 {
1989 m_renderPass = renderPass;
1990 return *this;
1991 }
1992#endif
1993
1994    bool operator==(RenderPass const &rhs) const
1995 {
1996 return m_renderPass == rhs.m_renderPass;
1997 }
1998
1999 bool operator!=(RenderPass const &rhs) const
2000 {
2001 return m_renderPass != rhs.m_renderPass;
2002 }
2003
2004 bool operator<(RenderPass const &rhs) const
2005 {
2006 return m_renderPass < rhs.m_renderPass;
2007 }
2008
2009#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2010 explicit
2011#endif
2012 operator VkRenderPass() const
2013 {
2014 return m_renderPass;
2015 }
2016
2017 explicit operator bool() const
2018 {
2019 return m_renderPass != VK_NULL_HANDLE;
2020 }
2021
2022 bool operator!() const
2023 {
2024 return m_renderPass == VK_NULL_HANDLE;
2025 }
2026
2027 private:
2028 VkRenderPass m_renderPass;
2029 };
2030 static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
2031
2032 class PipelineCache
2033 {
2034 public:
2035 PipelineCache()
2036 : m_pipelineCache(VK_NULL_HANDLE)
2037 {}
2038
2039#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2040 PipelineCache(VkPipelineCache pipelineCache)
2041 : m_pipelineCache(pipelineCache)
2042 {}
2043
2044 PipelineCache& operator=(VkPipelineCache pipelineCache)
2045 {
2046 m_pipelineCache = pipelineCache;
2047 return *this;
2048 }
2049#endif
2050
2051    bool operator==(PipelineCache const &rhs) const
2052 {
2053 return m_pipelineCache == rhs.m_pipelineCache;
2054 }
2055
2056 bool operator!=(PipelineCache const &rhs) const
2057 {
2058 return m_pipelineCache != rhs.m_pipelineCache;
2059 }
2060
2061 bool operator<(PipelineCache const &rhs) const
2062 {
2063 return m_pipelineCache < rhs.m_pipelineCache;
2064 }
2065
2066#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2067 explicit
2068#endif
2069 operator VkPipelineCache() const
2070 {
2071 return m_pipelineCache;
2072 }
2073
2074 explicit operator bool() const
2075 {
2076 return m_pipelineCache != VK_NULL_HANDLE;
2077 }
2078
2079 bool operator!() const
2080 {
2081 return m_pipelineCache == VK_NULL_HANDLE;
2082 }
2083
2084 private:
2085 VkPipelineCache m_pipelineCache;
2086 };
2087 static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
2088
2089  class ObjectTableNVX
2090 {
2091 public:
2092 ObjectTableNVX()
2093 : m_objectTableNVX(VK_NULL_HANDLE)
2094 {}
2095
2096#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2097 ObjectTableNVX(VkObjectTableNVX objectTableNVX)
2098 : m_objectTableNVX(objectTableNVX)
2099 {}
2100
2101 ObjectTableNVX& operator=(VkObjectTableNVX objectTableNVX)
2102 {
2103 m_objectTableNVX = objectTableNVX;
2104 return *this;
2105 }
2106#endif
2107
2108 bool operator==(ObjectTableNVX const &rhs) const
2109 {
2110 return m_objectTableNVX == rhs.m_objectTableNVX;
2111 }
2112
2113 bool operator!=(ObjectTableNVX const &rhs) const
2114 {
2115 return m_objectTableNVX != rhs.m_objectTableNVX;
2116 }
2117
2118 bool operator<(ObjectTableNVX const &rhs) const
2119 {
2120 return m_objectTableNVX < rhs.m_objectTableNVX;
2121 }
2122
2123#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2124 explicit
2125#endif
2126 operator VkObjectTableNVX() const
2127 {
2128 return m_objectTableNVX;
2129 }
2130
2131 explicit operator bool() const
2132 {
2133 return m_objectTableNVX != VK_NULL_HANDLE;
2134 }
2135
2136 bool operator!() const
2137 {
2138 return m_objectTableNVX == VK_NULL_HANDLE;
2139 }
2140
2141 private:
2142 VkObjectTableNVX m_objectTableNVX;
2143 };
2144 static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
2145
2146 class IndirectCommandsLayoutNVX
2147 {
2148 public:
2149 IndirectCommandsLayoutNVX()
2150 : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
2151 {}
2152
2153#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2154 IndirectCommandsLayoutNVX(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
2155 : m_indirectCommandsLayoutNVX(indirectCommandsLayoutNVX)
2156 {}
2157
2158 IndirectCommandsLayoutNVX& operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
2159 {
2160 m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
2161 return *this;
2162 }
2163#endif
2164
2165 bool operator==(IndirectCommandsLayoutNVX const &rhs) const
2166 {
2167 return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
2168 }
2169
2170 bool operator!=(IndirectCommandsLayoutNVX const &rhs) const
2171 {
2172 return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
2173 }
2174
2175 bool operator<(IndirectCommandsLayoutNVX const &rhs) const
2176 {
2177 return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
2178 }
2179
2180#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2181 explicit
2182#endif
2183 operator VkIndirectCommandsLayoutNVX() const
2184 {
2185 return m_indirectCommandsLayoutNVX;
2186 }
2187
2188 explicit operator bool() const
2189 {
2190 return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
2191 }
2192
2193 bool operator!() const
2194 {
2195 return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
2196 }
2197
2198 private:
2199 VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
2200 };
2201 static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
2202
2203  class DisplayKHR
2204 {
2205 public:
2206 DisplayKHR()
2207 : m_displayKHR(VK_NULL_HANDLE)
2208 {}
2209
2210#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2211 DisplayKHR(VkDisplayKHR displayKHR)
2212 : m_displayKHR(displayKHR)
2213 {}
2214
2215 DisplayKHR& operator=(VkDisplayKHR displayKHR)
2216 {
2217 m_displayKHR = displayKHR;
2218 return *this;
2219 }
2220#endif
2221
2222    bool operator==(DisplayKHR const &rhs) const
2223 {
2224 return m_displayKHR == rhs.m_displayKHR;
2225 }
2226
2227 bool operator!=(DisplayKHR const &rhs) const
2228 {
2229 return m_displayKHR != rhs.m_displayKHR;
2230 }
2231
2232 bool operator<(DisplayKHR const &rhs) const
2233 {
2234 return m_displayKHR < rhs.m_displayKHR;
2235 }
2236
2237#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2238 explicit
2239#endif
2240 operator VkDisplayKHR() const
2241 {
2242 return m_displayKHR;
2243 }
2244
2245 explicit operator bool() const
2246 {
2247 return m_displayKHR != VK_NULL_HANDLE;
2248 }
2249
2250 bool operator!() const
2251 {
2252 return m_displayKHR == VK_NULL_HANDLE;
2253 }
2254
2255 private:
2256 VkDisplayKHR m_displayKHR;
2257 };
2258 static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
2259
2260 class DisplayModeKHR
2261 {
2262 public:
2263 DisplayModeKHR()
2264 : m_displayModeKHR(VK_NULL_HANDLE)
2265 {}
2266
2267#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2268 DisplayModeKHR(VkDisplayModeKHR displayModeKHR)
2269 : m_displayModeKHR(displayModeKHR)
2270 {}
2271
2272 DisplayModeKHR& operator=(VkDisplayModeKHR displayModeKHR)
2273 {
2274 m_displayModeKHR = displayModeKHR;
2275 return *this;
2276 }
2277#endif
2278
2279    bool operator==(DisplayModeKHR const &rhs) const
2280 {
2281 return m_displayModeKHR == rhs.m_displayModeKHR;
2282 }
2283
2284 bool operator!=(DisplayModeKHR const &rhs) const
2285 {
2286 return m_displayModeKHR != rhs.m_displayModeKHR;
2287 }
2288
2289 bool operator<(DisplayModeKHR const &rhs) const
2290 {
2291 return m_displayModeKHR < rhs.m_displayModeKHR;
2292 }
2293
2294#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2295 explicit
2296#endif
2297 operator VkDisplayModeKHR() const
2298 {
2299 return m_displayModeKHR;
2300 }
2301
2302 explicit operator bool() const
2303 {
2304 return m_displayModeKHR != VK_NULL_HANDLE;
2305 }
2306
2307 bool operator!() const
2308 {
2309 return m_displayModeKHR == VK_NULL_HANDLE;
2310 }
2311
2312 private:
2313 VkDisplayModeKHR m_displayModeKHR;
2314 };
2315 static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
2316
2317 class SurfaceKHR
2318 {
2319 public:
2320 SurfaceKHR()
2321 : m_surfaceKHR(VK_NULL_HANDLE)
2322 {}
2323
2324#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2325 SurfaceKHR(VkSurfaceKHR surfaceKHR)
2326 : m_surfaceKHR(surfaceKHR)
2327 {}
2328
2329 SurfaceKHR& operator=(VkSurfaceKHR surfaceKHR)
2330 {
2331 m_surfaceKHR = surfaceKHR;
2332 return *this;
2333 }
2334#endif
2335
2336    bool operator==(SurfaceKHR const &rhs) const
2337 {
2338 return m_surfaceKHR == rhs.m_surfaceKHR;
2339 }
2340
2341 bool operator!=(SurfaceKHR const &rhs) const
2342 {
2343 return m_surfaceKHR != rhs.m_surfaceKHR;
2344 }
2345
2346 bool operator<(SurfaceKHR const &rhs) const
2347 {
2348 return m_surfaceKHR < rhs.m_surfaceKHR;
2349 }
2350
2351#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2352 explicit
2353#endif
2354 operator VkSurfaceKHR() const
2355 {
2356 return m_surfaceKHR;
2357 }
2358
2359 explicit operator bool() const
2360 {
2361 return m_surfaceKHR != VK_NULL_HANDLE;
2362 }
2363
2364 bool operator!() const
2365 {
2366 return m_surfaceKHR == VK_NULL_HANDLE;
2367 }
2368
2369 private:
2370 VkSurfaceKHR m_surfaceKHR;
2371 };
2372 static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
2373
2374 class SwapchainKHR
2375 {
2376 public:
2377 SwapchainKHR()
2378 : m_swapchainKHR(VK_NULL_HANDLE)
2379 {}
2380
2381#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2382 SwapchainKHR(VkSwapchainKHR swapchainKHR)
2383 : m_swapchainKHR(swapchainKHR)
2384 {}
2385
2386 SwapchainKHR& operator=(VkSwapchainKHR swapchainKHR)
2387 {
2388 m_swapchainKHR = swapchainKHR;
2389 return *this;
2390 }
2391#endif
2392
2393    bool operator==(SwapchainKHR const &rhs) const
2394 {
2395 return m_swapchainKHR == rhs.m_swapchainKHR;
2396 }
2397
2398 bool operator!=(SwapchainKHR const &rhs) const
2399 {
2400 return m_swapchainKHR != rhs.m_swapchainKHR;
2401 }
2402
2403 bool operator<(SwapchainKHR const &rhs) const
2404 {
2405 return m_swapchainKHR < rhs.m_swapchainKHR;
2406 }
2407
2408#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2409 explicit
2410#endif
2411 operator VkSwapchainKHR() const
2412 {
2413 return m_swapchainKHR;
2414 }
2415
2416 explicit operator bool() const
2417 {
2418 return m_swapchainKHR != VK_NULL_HANDLE;
2419 }
2420
2421 bool operator!() const
2422 {
2423 return m_swapchainKHR == VK_NULL_HANDLE;
2424 }
2425
2426 private:
2427 VkSwapchainKHR m_swapchainKHR;
2428 };
2429 static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
2430
2431 class DebugReportCallbackEXT
2432 {
2433 public:
2434 DebugReportCallbackEXT()
2435 : m_debugReportCallbackEXT(VK_NULL_HANDLE)
2436 {}
2437
2438#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2439 DebugReportCallbackEXT(VkDebugReportCallbackEXT debugReportCallbackEXT)
2440 : m_debugReportCallbackEXT(debugReportCallbackEXT)
2441 {}
2442
2443 DebugReportCallbackEXT& operator=(VkDebugReportCallbackEXT debugReportCallbackEXT)
2444 {
2445 m_debugReportCallbackEXT = debugReportCallbackEXT;
2446 return *this;
2447 }
2448#endif
2449
2450    bool operator==(DebugReportCallbackEXT const &rhs) const
2451 {
2452 return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
2453 }
2454
2455 bool operator!=(DebugReportCallbackEXT const &rhs) const
2456 {
2457 return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
2458 }
2459
2460 bool operator<(DebugReportCallbackEXT const &rhs) const
2461 {
2462 return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
2463 }
2464
2465#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2466 explicit
2467#endif
2468 operator VkDebugReportCallbackEXT() const
2469 {
2470 return m_debugReportCallbackEXT;
2471 }
2472
2473 explicit operator bool() const
2474 {
2475 return m_debugReportCallbackEXT != VK_NULL_HANDLE;
2476 }
2477
2478 bool operator!() const
2479 {
2480 return m_debugReportCallbackEXT == VK_NULL_HANDLE;
2481 }
2482
2483 private:
2484 VkDebugReportCallbackEXT m_debugReportCallbackEXT;
2485 };
2486 static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
2487
2488 struct Offset2D
2489 {
2490 Offset2D( int32_t x_ = 0, int32_t y_ = 0 )
2491 : x( x_ )
2492 , y( y_ )
2493 {
2494 }
2495
2496 Offset2D( VkOffset2D const & rhs )
2497 {
2498 memcpy( this, &rhs, sizeof(Offset2D) );
2499 }
2500
2501 Offset2D& operator=( VkOffset2D const & rhs )
2502 {
2503 memcpy( this, &rhs, sizeof(Offset2D) );
2504 return *this;
2505 }
2506
2507 Offset2D& setX( int32_t x_ )
2508 {
2509 x = x_;
2510 return *this;
2511 }
2512
2513 Offset2D& setY( int32_t y_ )
2514 {
2515 y = y_;
2516 return *this;
2517 }
2518
2519 operator const VkOffset2D&() const
2520 {
2521 return *reinterpret_cast<const VkOffset2D*>(this);
2522 }
2523
2524 bool operator==( Offset2D const& rhs ) const
2525 {
2526 return ( x == rhs.x )
2527 && ( y == rhs.y );
2528 }
2529
2530 bool operator!=( Offset2D const& rhs ) const
2531 {
2532 return !operator==( rhs );
2533 }
2534
2535 int32_t x;
2536 int32_t y;
2537 };
2538 static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
2539
2540 struct Offset3D
2541 {
2542 Offset3D( int32_t x_ = 0, int32_t y_ = 0, int32_t z_ = 0 )
2543 : x( x_ )
2544 , y( y_ )
2545 , z( z_ )
2546 {
2547 }
2548
2549 Offset3D( VkOffset3D const & rhs )
2550 {
2551 memcpy( this, &rhs, sizeof(Offset3D) );
2552 }
2553
2554 Offset3D& operator=( VkOffset3D const & rhs )
2555 {
2556 memcpy( this, &rhs, sizeof(Offset3D) );
2557 return *this;
2558 }
2559
2560 Offset3D& setX( int32_t x_ )
2561 {
2562 x = x_;
2563 return *this;
2564 }
2565
2566 Offset3D& setY( int32_t y_ )
2567 {
2568 y = y_;
2569 return *this;
2570 }
2571
2572 Offset3D& setZ( int32_t z_ )
2573 {
2574 z = z_;
2575 return *this;
2576 }
2577
2578 operator const VkOffset3D&() const
2579 {
2580 return *reinterpret_cast<const VkOffset3D*>(this);
2581 }
2582
2583 bool operator==( Offset3D const& rhs ) const
2584 {
2585 return ( x == rhs.x )
2586 && ( y == rhs.y )
2587 && ( z == rhs.z );
2588 }
2589
2590 bool operator!=( Offset3D const& rhs ) const
2591 {
2592 return !operator==( rhs );
2593 }
2594
2595 int32_t x;
2596 int32_t y;
2597 int32_t z;
2598 };
2599 static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
2600
2601 struct Extent2D
2602 {
2603 Extent2D( uint32_t width_ = 0, uint32_t height_ = 0 )
2604 : width( width_ )
2605 , height( height_ )
2606 {
2607 }
2608
2609 Extent2D( VkExtent2D const & rhs )
2610 {
2611 memcpy( this, &rhs, sizeof(Extent2D) );
2612 }
2613
2614 Extent2D& operator=( VkExtent2D const & rhs )
2615 {
2616 memcpy( this, &rhs, sizeof(Extent2D) );
2617 return *this;
2618 }
2619
2620 Extent2D& setWidth( uint32_t width_ )
2621 {
2622 width = width_;
2623 return *this;
2624 }
2625
2626 Extent2D& setHeight( uint32_t height_ )
2627 {
2628 height = height_;
2629 return *this;
2630 }
2631
2632 operator const VkExtent2D&() const
2633 {
2634 return *reinterpret_cast<const VkExtent2D*>(this);
2635 }
2636
2637 bool operator==( Extent2D const& rhs ) const
2638 {
2639 return ( width == rhs.width )
2640 && ( height == rhs.height );
2641 }
2642
2643 bool operator!=( Extent2D const& rhs ) const
2644 {
2645 return !operator==( rhs );
2646 }
2647
2648 uint32_t width;
2649 uint32_t height;
2650 };
2651 static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
2652
2653 struct Extent3D
2654 {
2655 Extent3D( uint32_t width_ = 0, uint32_t height_ = 0, uint32_t depth_ = 0 )
2656 : width( width_ )
2657 , height( height_ )
2658 , depth( depth_ )
2659 {
2660 }
2661
2662 Extent3D( VkExtent3D const & rhs )
2663 {
2664 memcpy( this, &rhs, sizeof(Extent3D) );
2665 }
2666
2667 Extent3D& operator=( VkExtent3D const & rhs )
2668 {
2669 memcpy( this, &rhs, sizeof(Extent3D) );
2670 return *this;
2671 }
2672
2673 Extent3D& setWidth( uint32_t width_ )
2674 {
2675 width = width_;
2676 return *this;
2677 }
2678
2679 Extent3D& setHeight( uint32_t height_ )
2680 {
2681 height = height_;
2682 return *this;
2683 }
2684
2685 Extent3D& setDepth( uint32_t depth_ )
2686 {
2687 depth = depth_;
2688 return *this;
2689 }
2690
2691 operator const VkExtent3D&() const
2692 {
2693 return *reinterpret_cast<const VkExtent3D*>(this);
2694 }
2695
2696 bool operator==( Extent3D const& rhs ) const
2697 {
2698 return ( width == rhs.width )
2699 && ( height == rhs.height )
2700 && ( depth == rhs.depth );
2701 }
2702
2703 bool operator!=( Extent3D const& rhs ) const
2704 {
2705 return !operator==( rhs );
2706 }
2707
2708 uint32_t width;
2709 uint32_t height;
2710 uint32_t depth;
2711 };
2712 static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
2713
2714 struct Viewport
2715 {
2716 Viewport( float x_ = 0, float y_ = 0, float width_ = 0, float height_ = 0, float minDepth_ = 0, float maxDepth_ = 0 )
2717 : x( x_ )
2718 , y( y_ )
2719 , width( width_ )
2720 , height( height_ )
2721 , minDepth( minDepth_ )
2722 , maxDepth( maxDepth_ )
2723 {
2724 }
2725
2726 Viewport( VkViewport const & rhs )
2727 {
2728 memcpy( this, &rhs, sizeof(Viewport) );
2729 }
2730
2731 Viewport& operator=( VkViewport const & rhs )
2732 {
2733 memcpy( this, &rhs, sizeof(Viewport) );
2734 return *this;
2735 }
2736
2737 Viewport& setX( float x_ )
2738 {
2739 x = x_;
2740 return *this;
2741 }
2742
2743 Viewport& setY( float y_ )
2744 {
2745 y = y_;
2746 return *this;
2747 }
2748
2749 Viewport& setWidth( float width_ )
2750 {
2751 width = width_;
2752 return *this;
2753 }
2754
2755 Viewport& setHeight( float height_ )
2756 {
2757 height = height_;
2758 return *this;
2759 }
2760
2761 Viewport& setMinDepth( float minDepth_ )
2762 {
2763 minDepth = minDepth_;
2764 return *this;
2765 }
2766
2767 Viewport& setMaxDepth( float maxDepth_ )
2768 {
2769 maxDepth = maxDepth_;
2770 return *this;
2771 }
2772
2773 operator const VkViewport&() const
2774 {
2775 return *reinterpret_cast<const VkViewport*>(this);
2776 }
2777
2778 bool operator==( Viewport const& rhs ) const
2779 {
2780 return ( x == rhs.x )
2781 && ( y == rhs.y )
2782 && ( width == rhs.width )
2783 && ( height == rhs.height )
2784 && ( minDepth == rhs.minDepth )
2785 && ( maxDepth == rhs.maxDepth );
2786 }
2787
2788 bool operator!=( Viewport const& rhs ) const
2789 {
2790 return !operator==( rhs );
2791 }
2792
2793 float x;
2794 float y;
2795 float width;
2796 float height;
2797 float minDepth;
2798 float maxDepth;
2799 };
2800 static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
2801
2802 struct Rect2D
2803 {
2804 Rect2D( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D() )
2805 : offset( offset_ )
2806 , extent( extent_ )
2807 {
2808 }
2809
2810 Rect2D( VkRect2D const & rhs )
2811 {
2812 memcpy( this, &rhs, sizeof(Rect2D) );
2813 }
2814
2815 Rect2D& operator=( VkRect2D const & rhs )
2816 {
2817 memcpy( this, &rhs, sizeof(Rect2D) );
2818 return *this;
2819 }
2820
2821 Rect2D& setOffset( Offset2D offset_ )
2822 {
2823 offset = offset_;
2824 return *this;
2825 }
2826
2827 Rect2D& setExtent( Extent2D extent_ )
2828 {
2829 extent = extent_;
2830 return *this;
2831 }
2832
2833 operator const VkRect2D&() const
2834 {
2835 return *reinterpret_cast<const VkRect2D*>(this);
2836 }
2837
2838 bool operator==( Rect2D const& rhs ) const
2839 {
2840 return ( offset == rhs.offset )
2841 && ( extent == rhs.extent );
2842 }
2843
2844 bool operator!=( Rect2D const& rhs ) const
2845 {
2846 return !operator==( rhs );
2847 }
2848
2849 Offset2D offset;
2850 Extent2D extent;
2851 };
2852 static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
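  // Usage sketch (illustrative comment, not generated code): the geometry structs above
  // expose chainable set*() members, so a scissor rectangle can be built fluently,
  // assuming the enclosing vk namespace:
  //
  //   vk::Rect2D scissor = vk::Rect2D()
  //                          .setOffset( vk::Offset2D( 0, 0 ) )
  //                          .setExtent( vk::Extent2D( 640, 480 ) );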
2853
2854 struct ClearRect
2855 {
2856 ClearRect( Rect2D rect_ = Rect2D(), uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
2857 : rect( rect_ )
2858 , baseArrayLayer( baseArrayLayer_ )
2859 , layerCount( layerCount_ )
2860 {
2861 }
2862
2863 ClearRect( VkClearRect const & rhs )
2864 {
2865 memcpy( this, &rhs, sizeof(ClearRect) );
2866 }
2867
2868 ClearRect& operator=( VkClearRect const & rhs )
2869 {
2870 memcpy( this, &rhs, sizeof(ClearRect) );
2871 return *this;
2872 }
2873
2874 ClearRect& setRect( Rect2D rect_ )
2875 {
2876 rect = rect_;
2877 return *this;
2878 }
2879
2880 ClearRect& setBaseArrayLayer( uint32_t baseArrayLayer_ )
2881 {
2882 baseArrayLayer = baseArrayLayer_;
2883 return *this;
2884 }
2885
2886 ClearRect& setLayerCount( uint32_t layerCount_ )
2887 {
2888 layerCount = layerCount_;
2889 return *this;
2890 }
2891
2892 operator const VkClearRect&() const
2893 {
2894 return *reinterpret_cast<const VkClearRect*>(this);
2895 }
2896
2897 bool operator==( ClearRect const& rhs ) const
2898 {
2899 return ( rect == rhs.rect )
2900 && ( baseArrayLayer == rhs.baseArrayLayer )
2901 && ( layerCount == rhs.layerCount );
2902 }
2903
2904 bool operator!=( ClearRect const& rhs ) const
2905 {
2906 return !operator==( rhs );
2907 }
2908
2909 Rect2D rect;
2910 uint32_t baseArrayLayer;
2911 uint32_t layerCount;
2912 };
2913 static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
2914
2915 struct ExtensionProperties
2916 {
2917 operator const VkExtensionProperties&() const
2918 {
2919 return *reinterpret_cast<const VkExtensionProperties*>(this);
2920 }
2921
2922 bool operator==( ExtensionProperties const& rhs ) const
2923 {
2924 return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
2925 && ( specVersion == rhs.specVersion );
2926 }
2927
2928 bool operator!=( ExtensionProperties const& rhs ) const
2929 {
2930 return !operator==( rhs );
2931 }
2932
2933 char extensionName[VK_MAX_EXTENSION_NAME_SIZE];
2934 uint32_t specVersion;
2935 };
2936 static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
2937
2938 struct LayerProperties
2939 {
2940 operator const VkLayerProperties&() const
2941 {
2942 return *reinterpret_cast<const VkLayerProperties*>(this);
2943 }
2944
2945 bool operator==( LayerProperties const& rhs ) const
2946 {
2947 return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
2948 && ( specVersion == rhs.specVersion )
2949 && ( implementationVersion == rhs.implementationVersion )
2950 && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
2951 }
2952
2953 bool operator!=( LayerProperties const& rhs ) const
2954 {
2955 return !operator==( rhs );
2956 }
2957
2958 char layerName[VK_MAX_EXTENSION_NAME_SIZE];
2959 uint32_t specVersion;
2960 uint32_t implementationVersion;
2961 char description[VK_MAX_DESCRIPTION_SIZE];
2962 };
2963 static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
2964
2965 struct AllocationCallbacks
2966 {
2967 AllocationCallbacks( void* pUserData_ = nullptr, PFN_vkAllocationFunction pfnAllocation_ = nullptr, PFN_vkReallocationFunction pfnReallocation_ = nullptr, PFN_vkFreeFunction pfnFree_ = nullptr, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr, PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr )
2968 : pUserData( pUserData_ )
2969 , pfnAllocation( pfnAllocation_ )
2970 , pfnReallocation( pfnReallocation_ )
2971 , pfnFree( pfnFree_ )
2972 , pfnInternalAllocation( pfnInternalAllocation_ )
2973 , pfnInternalFree( pfnInternalFree_ )
2974 {
2975 }
2976
2977 AllocationCallbacks( VkAllocationCallbacks const & rhs )
2978 {
2979 memcpy( this, &rhs, sizeof(AllocationCallbacks) );
2980 }
2981
2982 AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs )
2983 {
2984 memcpy( this, &rhs, sizeof(AllocationCallbacks) );
2985 return *this;
2986 }
2987
2988 AllocationCallbacks& setPUserData( void* pUserData_ )
2989 {
2990 pUserData = pUserData_;
2991 return *this;
2992 }
2993
2994 AllocationCallbacks& setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ )
2995 {
2996 pfnAllocation = pfnAllocation_;
2997 return *this;
2998 }
2999
3000 AllocationCallbacks& setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ )
3001 {
3002 pfnReallocation = pfnReallocation_;
3003 return *this;
3004 }
3005
3006 AllocationCallbacks& setPfnFree( PFN_vkFreeFunction pfnFree_ )
3007 {
3008 pfnFree = pfnFree_;
3009 return *this;
3010 }
3011
3012 AllocationCallbacks& setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ )
3013 {
3014 pfnInternalAllocation = pfnInternalAllocation_;
3015 return *this;
3016 }
3017
3018 AllocationCallbacks& setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ )
3019 {
3020 pfnInternalFree = pfnInternalFree_;
3021 return *this;
3022 }
3023
3024 operator const VkAllocationCallbacks&() const
3025 {
3026 return *reinterpret_cast<const VkAllocationCallbacks*>(this);
3027 }
3028
3029 bool operator==( AllocationCallbacks const& rhs ) const
3030 {
3031 return ( pUserData == rhs.pUserData )
3032 && ( pfnAllocation == rhs.pfnAllocation )
3033 && ( pfnReallocation == rhs.pfnReallocation )
3034 && ( pfnFree == rhs.pfnFree )
3035 && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
3036 && ( pfnInternalFree == rhs.pfnInternalFree );
3037 }
3038
3039 bool operator!=( AllocationCallbacks const& rhs ) const
3040 {
3041 return !operator==( rhs );
3042 }
3043
3044 void* pUserData;
3045 PFN_vkAllocationFunction pfnAllocation;
3046 PFN_vkReallocationFunction pfnReallocation;
3047 PFN_vkFreeFunction pfnFree;
3048 PFN_vkInternalAllocationNotification pfnInternalAllocation;
3049 PFN_vkInternalFreeNotification pfnInternalFree;
3050 };
3051 static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
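  // Usage sketch (illustrative comment, not generated code): a custom host allocator is
  // wired in by pointing the pfn* members at functions with the matching PFN_vk*
  // signatures from vulkan.h; myAllocatorState, myAlloc, myRealloc, and myFree below are
  // hypothetical names supplied by the application:
  //
  //   vk::AllocationCallbacks callbacks = vk::AllocationCallbacks()
  //                                         .setPUserData( &myAllocatorState )
  //                                         .setPfnAllocation( &myAlloc )
  //                                         .setPfnReallocation( &myRealloc )
  //                                         .setPfnFree( &myFree );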
3052
3053 struct MemoryRequirements
3054 {
3055 operator const VkMemoryRequirements&() const
3056 {
3057 return *reinterpret_cast<const VkMemoryRequirements*>(this);
3058 }
3059
3060 bool operator==( MemoryRequirements const& rhs ) const
3061 {
3062 return ( size == rhs.size )
3063 && ( alignment == rhs.alignment )
3064 && ( memoryTypeBits == rhs.memoryTypeBits );
3065 }
3066
3067 bool operator!=( MemoryRequirements const& rhs ) const
3068 {
3069 return !operator==( rhs );
3070 }
3071
3072 DeviceSize size;
3073 DeviceSize alignment;
3074 uint32_t memoryTypeBits;
3075 };
3076 static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
3077
3078 struct DescriptorBufferInfo
3079 {
3080 DescriptorBufferInfo( Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize range_ = 0 )
3081 : buffer( buffer_ )
3082 , offset( offset_ )
3083 , range( range_ )
3084 {
3085 }
3086
3087 DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs )
3088 {
3089 memcpy( this, &rhs, sizeof(DescriptorBufferInfo) );
3090 }
3091
3092 DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs )
3093 {
3094 memcpy( this, &rhs, sizeof(DescriptorBufferInfo) );
3095 return *this;
3096 }
3097
3098 DescriptorBufferInfo& setBuffer( Buffer buffer_ )
3099 {
3100 buffer = buffer_;
3101 return *this;
3102 }
3103
3104 DescriptorBufferInfo& setOffset( DeviceSize offset_ )
3105 {
3106 offset = offset_;
3107 return *this;
3108 }
3109
3110 DescriptorBufferInfo& setRange( DeviceSize range_ )
3111 {
3112 range = range_;
3113 return *this;
3114 }
3115
3116 operator const VkDescriptorBufferInfo&() const
3117 {
3118 return *reinterpret_cast<const VkDescriptorBufferInfo*>(this);
3119 }
3120
3121 bool operator==( DescriptorBufferInfo const& rhs ) const
3122 {
3123 return ( buffer == rhs.buffer )
3124 && ( offset == rhs.offset )
3125 && ( range == rhs.range );
3126 }
3127
3128 bool operator!=( DescriptorBufferInfo const& rhs ) const
3129 {
3130 return !operator==( rhs );
3131 }
3132
3133 Buffer buffer;
3134 DeviceSize offset;
3135 DeviceSize range;
3136 };
3137 static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
3138
3139 struct SubresourceLayout
3140 {
3141 operator const VkSubresourceLayout&() const
3142 {
3143 return *reinterpret_cast<const VkSubresourceLayout*>(this);
3144 }
3145
3146 bool operator==( SubresourceLayout const& rhs ) const
3147 {
3148 return ( offset == rhs.offset )
3149 && ( size == rhs.size )
3150 && ( rowPitch == rhs.rowPitch )
3151 && ( arrayPitch == rhs.arrayPitch )
3152 && ( depthPitch == rhs.depthPitch );
3153 }
3154
3155 bool operator!=( SubresourceLayout const& rhs ) const
3156 {
3157 return !operator==( rhs );
3158 }
3159
3160 DeviceSize offset;
3161 DeviceSize size;
3162 DeviceSize rowPitch;
3163 DeviceSize arrayPitch;
3164 DeviceSize depthPitch;
3165 };
3166 static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
3167
3168 struct BufferCopy
3169 {
3170 BufferCopy( DeviceSize srcOffset_ = 0, DeviceSize dstOffset_ = 0, DeviceSize size_ = 0 )
3171 : srcOffset( srcOffset_ )
3172 , dstOffset( dstOffset_ )
3173 , size( size_ )
3174 {
3175 }
3176
3177 BufferCopy( VkBufferCopy const & rhs )
3178 {
3179 memcpy( this, &rhs, sizeof(BufferCopy) );
3180 }
3181
3182 BufferCopy& operator=( VkBufferCopy const & rhs )
3183 {
3184 memcpy( this, &rhs, sizeof(BufferCopy) );
3185 return *this;
3186 }
3187
3188 BufferCopy& setSrcOffset( DeviceSize srcOffset_ )
3189 {
3190 srcOffset = srcOffset_;
3191 return *this;
3192 }
3193
3194 BufferCopy& setDstOffset( DeviceSize dstOffset_ )
3195 {
3196 dstOffset = dstOffset_;
3197 return *this;
3198 }
3199
3200 BufferCopy& setSize( DeviceSize size_ )
3201 {
3202 size = size_;
3203 return *this;
3204 }
3205
3206 operator const VkBufferCopy&() const
3207 {
3208 return *reinterpret_cast<const VkBufferCopy*>(this);
3209 }
3210
3211 bool operator==( BufferCopy const& rhs ) const
3212 {
3213 return ( srcOffset == rhs.srcOffset )
3214 && ( dstOffset == rhs.dstOffset )
3215 && ( size == rhs.size );
3216 }
3217
3218 bool operator!=( BufferCopy const& rhs ) const
3219 {
3220 return !operator==( rhs );
3221 }
3222
3223 DeviceSize srcOffset;
3224 DeviceSize dstOffset;
3225 DeviceSize size;
3226 };
3227 static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
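  // Usage sketch (illustrative comment, not generated code): a buffer-to-buffer copy
  // region is just a pair of byte offsets plus a byte count:
  //
  //   vk::BufferCopy region = vk::BufferCopy()
  //                             .setSrcOffset( 0 )
  //                             .setDstOffset( 0 )
  //                             .setSize( 256 );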
3228
3229 struct SpecializationMapEntry
3230 {
3231 SpecializationMapEntry( uint32_t constantID_ = 0, uint32_t offset_ = 0, size_t size_ = 0 )
3232 : constantID( constantID_ )
3233 , offset( offset_ )
3234 , size( size_ )
3235 {
3236 }
3237
3238 SpecializationMapEntry( VkSpecializationMapEntry const & rhs )
3239 {
3240 memcpy( this, &rhs, sizeof(SpecializationMapEntry) );
3241 }
3242
3243 SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs )
3244 {
3245 memcpy( this, &rhs, sizeof(SpecializationMapEntry) );
3246 return *this;
3247 }
3248
3249 SpecializationMapEntry& setConstantID( uint32_t constantID_ )
3250 {
3251 constantID = constantID_;
3252 return *this;
3253 }
3254
3255 SpecializationMapEntry& setOffset( uint32_t offset_ )
3256 {
3257 offset = offset_;
3258 return *this;
3259 }
3260
3261 SpecializationMapEntry& setSize( size_t size_ )
3262 {
3263 size = size_;
3264 return *this;
3265 }
3266
3267 operator const VkSpecializationMapEntry&() const
3268 {
3269 return *reinterpret_cast<const VkSpecializationMapEntry*>(this);
3270 }
3271
3272 bool operator==( SpecializationMapEntry const& rhs ) const
3273 {
3274 return ( constantID == rhs.constantID )
3275 && ( offset == rhs.offset )
3276 && ( size == rhs.size );
3277 }
3278
3279 bool operator!=( SpecializationMapEntry const& rhs ) const
3280 {
3281 return !operator==( rhs );
3282 }
3283
3284 uint32_t constantID;
3285 uint32_t offset;
3286 size_t size;
3287 };
3288 static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
3289
3290 struct SpecializationInfo
3291 {
3292 SpecializationInfo( uint32_t mapEntryCount_ = 0, const SpecializationMapEntry* pMapEntries_ = nullptr, size_t dataSize_ = 0, const void* pData_ = nullptr )
3293 : mapEntryCount( mapEntryCount_ )
3294 , pMapEntries( pMapEntries_ )
3295 , dataSize( dataSize_ )
3296 , pData( pData_ )
3297 {
3298 }
3299
3300 SpecializationInfo( VkSpecializationInfo const & rhs )
3301 {
3302 memcpy( this, &rhs, sizeof(SpecializationInfo) );
3303 }
3304
3305 SpecializationInfo& operator=( VkSpecializationInfo const & rhs )
3306 {
3307 memcpy( this, &rhs, sizeof(SpecializationInfo) );
3308 return *this;
3309 }
3310
3311 SpecializationInfo& setMapEntryCount( uint32_t mapEntryCount_ )
3312 {
3313 mapEntryCount = mapEntryCount_;
3314 return *this;
3315 }
3316
3317 SpecializationInfo& setPMapEntries( const SpecializationMapEntry* pMapEntries_ )
3318 {
3319 pMapEntries = pMapEntries_;
3320 return *this;
3321 }
3322
3323 SpecializationInfo& setDataSize( size_t dataSize_ )
3324 {
3325 dataSize = dataSize_;
3326 return *this;
3327 }
3328
3329 SpecializationInfo& setPData( const void* pData_ )
3330 {
3331 pData = pData_;
3332 return *this;
3333 }
3334
3335 operator const VkSpecializationInfo&() const
3336 {
3337 return *reinterpret_cast<const VkSpecializationInfo*>(this);
3338 }
3339
3340 bool operator==( SpecializationInfo const& rhs ) const
3341 {
3342 return ( mapEntryCount == rhs.mapEntryCount )
3343 && ( pMapEntries == rhs.pMapEntries )
3344 && ( dataSize == rhs.dataSize )
3345 && ( pData == rhs.pData );
3346 }
3347
3348 bool operator!=( SpecializationInfo const& rhs ) const
3349 {
3350 return !operator==( rhs );
3351 }
3352
3353 uint32_t mapEntryCount;
3354 const SpecializationMapEntry* pMapEntries;
3355 size_t dataSize;
3356 const void* pData;
3357 };
3358 static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
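  // Usage sketch (illustrative comment, not generated code): SpecializationInfo refers
  // to an array of SpecializationMapEntry plus the raw constant data; each entry's
  // offset and size locate one constantID inside pData:
  //
  //   const uint32_t useFastPath = 1;                                // hypothetical constant data
  //   vk::SpecializationMapEntry entry( 0, 0, sizeof( uint32_t ) );  // constantID 0 at offset 0
  //   vk::SpecializationInfo specInfo( 1, &entry, sizeof( useFastPath ), &useFastPath );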
3359
3360 union ClearColorValue
3361 {
3362 ClearColorValue( const std::array<float,4>& float32_ = { {0} } )
3363 {
3364 memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
3365 }
3366
3367 ClearColorValue( const std::array<int32_t,4>& int32_ )
3368 {
3369 memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
3370 }
3371
3372 ClearColorValue( const std::array<uint32_t,4>& uint32_ )
3373 {
3374 memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
3375 }
3376
3377 ClearColorValue& setFloat32( std::array<float,4> float32_ )
3378 {
3379 memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
3380 return *this;
3381 }
3382
3383 ClearColorValue& setInt32( std::array<int32_t,4> int32_ )
3384 {
3385 memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
3386 return *this;
3387 }
3388
3389 ClearColorValue& setUint32( std::array<uint32_t,4> uint32_ )
3390 {
3391 memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
3392 return *this;
3393 }
3394
3395 operator VkClearColorValue const& () const
3396 {
3397 return *reinterpret_cast<const VkClearColorValue*>(this);
3398 }
3399
3400 float float32[4];
3401 int32_t int32[4];
3402 uint32_t uint32[4];
3403 };
3404
3405 struct ClearDepthStencilValue
3406 {
3407 ClearDepthStencilValue( float depth_ = 0, uint32_t stencil_ = 0 )
3408 : depth( depth_ )
3409 , stencil( stencil_ )
3410 {
3411 }
3412
3413 ClearDepthStencilValue( VkClearDepthStencilValue const & rhs )
3414 {
3415 memcpy( this, &rhs, sizeof(ClearDepthStencilValue) );
3416 }
3417
3418 ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs )
3419 {
3420 memcpy( this, &rhs, sizeof(ClearDepthStencilValue) );
3421 return *this;
3422 }
3423
3424 ClearDepthStencilValue& setDepth( float depth_ )
3425 {
3426 depth = depth_;
3427 return *this;
3428 }
3429
3430 ClearDepthStencilValue& setStencil( uint32_t stencil_ )
3431 {
3432 stencil = stencil_;
3433 return *this;
3434 }
3435
3436 operator const VkClearDepthStencilValue&() const
3437 {
3438 return *reinterpret_cast<const VkClearDepthStencilValue*>(this);
3439 }
3440
3441 bool operator==( ClearDepthStencilValue const& rhs ) const
3442 {
3443 return ( depth == rhs.depth )
3444 && ( stencil == rhs.stencil );
3445 }
3446
3447 bool operator!=( ClearDepthStencilValue const& rhs ) const
3448 {
3449 return !operator==( rhs );
3450 }
3451
3452 float depth;
3453 uint32_t stencil;
3454 };
3455 static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
3456
3457 union ClearValue
3458 {
3459 ClearValue( ClearColorValue color_ = ClearColorValue() )
3460 {
3461 color = color_;
3462 }
3463
3464 ClearValue( ClearDepthStencilValue depthStencil_ )
3465 {
3466 depthStencil = depthStencil_;
3467 }
3468
3469 ClearValue& setColor( ClearColorValue color_ )
3470 {
3471 color = color_;
3472 return *this;
3473 }
3474
3475 ClearValue& setDepthStencil( ClearDepthStencilValue depthStencil_ )
3476 {
3477 depthStencil = depthStencil_;
3478 return *this;
3479 }
3480
3481 operator VkClearValue const& () const
3482 {
3483 return *reinterpret_cast<const VkClearValue*>(this);
3484 }
3485
3486#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
3487 ClearColorValue color;
3488 ClearDepthStencilValue depthStencil;
3489#else
3490 VkClearColorValue color;
3491 VkClearDepthStencilValue depthStencil;
3492#endif // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
3493 };
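  // Usage sketch (illustrative comment, not generated code): ClearValue is a union, so
  // one instance holds either a color or a depth/stencil value, matching the attachment
  // it clears; both can be built from the wrapper types above:
  //
  //   vk::ClearValue clearColor( vk::ClearColorValue( std::array<float,4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  //   vk::ClearValue clearDepth( vk::ClearDepthStencilValue( 1.0f, 0 ) );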
3494
3495 struct PhysicalDeviceFeatures
3496 {
3497 PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = 0, Bool32 fullDrawIndexUint32_ = 0, Bool32 imageCubeArray_ = 0, Bool32 independentBlend_ = 0, Bool32 geometryShader_ = 0, Bool32 tessellationShader_ = 0, Bool32 sampleRateShading_ = 0, Bool32 dualSrcBlend_ = 0, Bool32 logicOp_ = 0, Bool32 multiDrawIndirect_ = 0, Bool32 drawIndirectFirstInstance_ = 0, Bool32 depthClamp_ = 0, Bool32 depthBiasClamp_ = 0, Bool32 fillModeNonSolid_ = 0, Bool32 depthBounds_ = 0, Bool32 wideLines_ = 0, Bool32 largePoints_ = 0, Bool32 alphaToOne_ = 0, Bool32 multiViewport_ = 0, Bool32 samplerAnisotropy_ = 0, Bool32 textureCompressionETC2_ = 0, Bool32 textureCompressionASTC_LDR_ = 0, Bool32 textureCompressionBC_ = 0, Bool32 occlusionQueryPrecise_ = 0, Bool32 pipelineStatisticsQuery_ = 0, Bool32 vertexPipelineStoresAndAtomics_ = 0, Bool32 fragmentStoresAndAtomics_ = 0, Bool32 shaderTessellationAndGeometryPointSize_ = 0, Bool32 shaderImageGatherExtended_ = 0, Bool32 shaderStorageImageExtendedFormats_ = 0, Bool32 shaderStorageImageMultisample_ = 0, Bool32 shaderStorageImageReadWithoutFormat_ = 0, Bool32 shaderStorageImageWriteWithoutFormat_ = 0, Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0, Bool32 shaderSampledImageArrayDynamicIndexing_ = 0, Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0, Bool32 shaderStorageImageArrayDynamicIndexing_ = 0, Bool32 shaderClipDistance_ = 0, Bool32 shaderCullDistance_ = 0, Bool32 shaderFloat64_ = 0, Bool32 shaderInt64_ = 0, Bool32 shaderInt16_ = 0, Bool32 shaderResourceResidency_ = 0, Bool32 shaderResourceMinLod_ = 0, Bool32 sparseBinding_ = 0, Bool32 sparseResidencyBuffer_ = 0, Bool32 sparseResidencyImage2D_ = 0, Bool32 sparseResidencyImage3D_ = 0, Bool32 sparseResidency2Samples_ = 0, Bool32 sparseResidency4Samples_ = 0, Bool32 sparseResidency8Samples_ = 0, Bool32 sparseResidency16Samples_ = 0, Bool32 sparseResidencyAliased_ = 0, Bool32 variableMultisampleRate_ = 0, Bool32 inheritedQueries_ = 0 )
3498 : robustBufferAccess( robustBufferAccess_ )
3499 , fullDrawIndexUint32( fullDrawIndexUint32_ )
3500 , imageCubeArray( imageCubeArray_ )
3501 , independentBlend( independentBlend_ )
3502 , geometryShader( geometryShader_ )
3503 , tessellationShader( tessellationShader_ )
3504 , sampleRateShading( sampleRateShading_ )
3505 , dualSrcBlend( dualSrcBlend_ )
3506 , logicOp( logicOp_ )
3507 , multiDrawIndirect( multiDrawIndirect_ )
3508 , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
3509 , depthClamp( depthClamp_ )
3510 , depthBiasClamp( depthBiasClamp_ )
3511 , fillModeNonSolid( fillModeNonSolid_ )
3512 , depthBounds( depthBounds_ )
3513 , wideLines( wideLines_ )
3514 , largePoints( largePoints_ )
3515 , alphaToOne( alphaToOne_ )
3516 , multiViewport( multiViewport_ )
3517 , samplerAnisotropy( samplerAnisotropy_ )
3518 , textureCompressionETC2( textureCompressionETC2_ )
3519 , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
3520 , textureCompressionBC( textureCompressionBC_ )
3521 , occlusionQueryPrecise( occlusionQueryPrecise_ )
3522 , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
3523 , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
3524 , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
3525 , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
3526 , shaderImageGatherExtended( shaderImageGatherExtended_ )
3527 , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
3528 , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
3529 , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
3530 , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
3531 , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
3532 , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
3533 , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
3534 , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
3535 , shaderClipDistance( shaderClipDistance_ )
3536 , shaderCullDistance( shaderCullDistance_ )
3537 , shaderFloat64( shaderFloat64_ )
3538 , shaderInt64( shaderInt64_ )
3539 , shaderInt16( shaderInt16_ )
3540 , shaderResourceResidency( shaderResourceResidency_ )
3541 , shaderResourceMinLod( shaderResourceMinLod_ )
3542 , sparseBinding( sparseBinding_ )
3543 , sparseResidencyBuffer( sparseResidencyBuffer_ )
3544 , sparseResidencyImage2D( sparseResidencyImage2D_ )
3545 , sparseResidencyImage3D( sparseResidencyImage3D_ )
3546 , sparseResidency2Samples( sparseResidency2Samples_ )
3547 , sparseResidency4Samples( sparseResidency4Samples_ )
3548 , sparseResidency8Samples( sparseResidency8Samples_ )
3549 , sparseResidency16Samples( sparseResidency16Samples_ )
3550 , sparseResidencyAliased( sparseResidencyAliased_ )
3551 , variableMultisampleRate( variableMultisampleRate_ )
3552 , inheritedQueries( inheritedQueries_ )
3553 {
3554 }
3555
3556 PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs )
3557 {
3558 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures) );
3559 }
3560
3561 PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs )
3562 {
3563 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures) );
3564 return *this;
3565 }
3566
3567 PhysicalDeviceFeatures& setRobustBufferAccess( Bool32 robustBufferAccess_ )
3568 {
3569 robustBufferAccess = robustBufferAccess_;
3570 return *this;
3571 }
3572
3573 PhysicalDeviceFeatures& setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ )
3574 {
3575 fullDrawIndexUint32 = fullDrawIndexUint32_;
3576 return *this;
3577 }
3578
3579 PhysicalDeviceFeatures& setImageCubeArray( Bool32 imageCubeArray_ )
3580 {
3581 imageCubeArray = imageCubeArray_;
3582 return *this;
3583 }
3584
3585 PhysicalDeviceFeatures& setIndependentBlend( Bool32 independentBlend_ )
3586 {
3587 independentBlend = independentBlend_;
3588 return *this;
3589 }
3590
3591 PhysicalDeviceFeatures& setGeometryShader( Bool32 geometryShader_ )
3592 {
3593 geometryShader = geometryShader_;
3594 return *this;
3595 }
3596
3597 PhysicalDeviceFeatures& setTessellationShader( Bool32 tessellationShader_ )
3598 {
3599 tessellationShader = tessellationShader_;
3600 return *this;
3601 }
3602
3603 PhysicalDeviceFeatures& setSampleRateShading( Bool32 sampleRateShading_ )
3604 {
3605 sampleRateShading = sampleRateShading_;
3606 return *this;
3607 }
3608
3609 PhysicalDeviceFeatures& setDualSrcBlend( Bool32 dualSrcBlend_ )
3610 {
3611 dualSrcBlend = dualSrcBlend_;
3612 return *this;
3613 }
3614
3615 PhysicalDeviceFeatures& setLogicOp( Bool32 logicOp_ )
3616 {
3617 logicOp = logicOp_;
3618 return *this;
3619 }
3620
3621 PhysicalDeviceFeatures& setMultiDrawIndirect( Bool32 multiDrawIndirect_ )
3622 {
3623 multiDrawIndirect = multiDrawIndirect_;
3624 return *this;
3625 }
3626
3627 PhysicalDeviceFeatures& setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ )
3628 {
3629 drawIndirectFirstInstance = drawIndirectFirstInstance_;
3630 return *this;
3631 }
3632
3633 PhysicalDeviceFeatures& setDepthClamp( Bool32 depthClamp_ )
3634 {
3635 depthClamp = depthClamp_;
3636 return *this;
3637 }
3638
3639 PhysicalDeviceFeatures& setDepthBiasClamp( Bool32 depthBiasClamp_ )
3640 {
3641 depthBiasClamp = depthBiasClamp_;
3642 return *this;
3643 }
3644
3645 PhysicalDeviceFeatures& setFillModeNonSolid( Bool32 fillModeNonSolid_ )
3646 {
3647 fillModeNonSolid = fillModeNonSolid_;
3648 return *this;
3649 }
3650
3651 PhysicalDeviceFeatures& setDepthBounds( Bool32 depthBounds_ )
3652 {
3653 depthBounds = depthBounds_;
3654 return *this;
3655 }
3656
3657 PhysicalDeviceFeatures& setWideLines( Bool32 wideLines_ )
3658 {
3659 wideLines = wideLines_;
3660 return *this;
3661 }
3662
3663 PhysicalDeviceFeatures& setLargePoints( Bool32 largePoints_ )
3664 {
3665 largePoints = largePoints_;
3666 return *this;
3667 }
3668
3669 PhysicalDeviceFeatures& setAlphaToOne( Bool32 alphaToOne_ )
3670 {
3671 alphaToOne = alphaToOne_;
3672 return *this;
3673 }
3674
3675 PhysicalDeviceFeatures& setMultiViewport( Bool32 multiViewport_ )
3676 {
3677 multiViewport = multiViewport_;
3678 return *this;
3679 }
3680
3681 PhysicalDeviceFeatures& setSamplerAnisotropy( Bool32 samplerAnisotropy_ )
3682 {
3683 samplerAnisotropy = samplerAnisotropy_;
3684 return *this;
3685 }
3686
3687 PhysicalDeviceFeatures& setTextureCompressionETC2( Bool32 textureCompressionETC2_ )
3688 {
3689 textureCompressionETC2 = textureCompressionETC2_;
3690 return *this;
3691 }
3692
3693 PhysicalDeviceFeatures& setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ )
3694 {
3695 textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
3696 return *this;
3697 }
3698
3699 PhysicalDeviceFeatures& setTextureCompressionBC( Bool32 textureCompressionBC_ )
3700 {
3701 textureCompressionBC = textureCompressionBC_;
3702 return *this;
3703 }
3704
3705 PhysicalDeviceFeatures& setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ )
3706 {
3707 occlusionQueryPrecise = occlusionQueryPrecise_;
3708 return *this;
3709 }
3710
3711 PhysicalDeviceFeatures& setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ )
3712 {
3713 pipelineStatisticsQuery = pipelineStatisticsQuery_;
3714 return *this;
3715 }
3716
3717 PhysicalDeviceFeatures& setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ )
3718 {
3719 vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
3720 return *this;
3721 }
3722
3723 PhysicalDeviceFeatures& setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ )
3724 {
3725 fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
3726 return *this;
3727 }
3728
3729 PhysicalDeviceFeatures& setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ )
3730 {
3731 shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
3732 return *this;
3733 }
3734
3735 PhysicalDeviceFeatures& setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ )
3736 {
3737 shaderImageGatherExtended = shaderImageGatherExtended_;
3738 return *this;
3739 }
3740
3741 PhysicalDeviceFeatures& setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ )
3742 {
3743 shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
3744 return *this;
3745 }
3746
3747 PhysicalDeviceFeatures& setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ )
3748 {
3749 shaderStorageImageMultisample = shaderStorageImageMultisample_;
3750 return *this;
3751 }
3752
3753 PhysicalDeviceFeatures& setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ )
3754 {
3755 shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
3756 return *this;
3757 }
3758
3759 PhysicalDeviceFeatures& setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ )
3760 {
3761 shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
3762 return *this;
3763 }
3764
3765 PhysicalDeviceFeatures& setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ )
3766 {
3767 shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
3768 return *this;
3769 }
3770
3771 PhysicalDeviceFeatures& setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ )
3772 {
3773 shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
3774 return *this;
3775 }
3776
3777 PhysicalDeviceFeatures& setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ )
3778 {
3779 shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
3780 return *this;
3781 }
3782
3783 PhysicalDeviceFeatures& setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ )
3784 {
3785 shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
3786 return *this;
3787 }
3788
3789 PhysicalDeviceFeatures& setShaderClipDistance( Bool32 shaderClipDistance_ )
3790 {
3791 shaderClipDistance = shaderClipDistance_;
3792 return *this;
3793 }
3794
3795 PhysicalDeviceFeatures& setShaderCullDistance( Bool32 shaderCullDistance_ )
3796 {
3797 shaderCullDistance = shaderCullDistance_;
3798 return *this;
3799 }
3800
3801 PhysicalDeviceFeatures& setShaderFloat64( Bool32 shaderFloat64_ )
3802 {
3803 shaderFloat64 = shaderFloat64_;
3804 return *this;
3805 }
3806
3807 PhysicalDeviceFeatures& setShaderInt64( Bool32 shaderInt64_ )
3808 {
3809 shaderInt64 = shaderInt64_;
3810 return *this;
3811 }
3812
3813 PhysicalDeviceFeatures& setShaderInt16( Bool32 shaderInt16_ )
3814 {
3815 shaderInt16 = shaderInt16_;
3816 return *this;
3817 }
3818
3819 PhysicalDeviceFeatures& setShaderResourceResidency( Bool32 shaderResourceResidency_ )
3820 {
3821 shaderResourceResidency = shaderResourceResidency_;
3822 return *this;
3823 }
3824
3825 PhysicalDeviceFeatures& setShaderResourceMinLod( Bool32 shaderResourceMinLod_ )
3826 {
3827 shaderResourceMinLod = shaderResourceMinLod_;
3828 return *this;
3829 }
3830
3831 PhysicalDeviceFeatures& setSparseBinding( Bool32 sparseBinding_ )
3832 {
3833 sparseBinding = sparseBinding_;
3834 return *this;
3835 }
3836
3837 PhysicalDeviceFeatures& setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ )
3838 {
3839 sparseResidencyBuffer = sparseResidencyBuffer_;
3840 return *this;
3841 }
3842
3843 PhysicalDeviceFeatures& setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ )
3844 {
3845 sparseResidencyImage2D = sparseResidencyImage2D_;
3846 return *this;
3847 }
3848
3849 PhysicalDeviceFeatures& setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ )
3850 {
3851 sparseResidencyImage3D = sparseResidencyImage3D_;
3852 return *this;
3853 }
3854
3855 PhysicalDeviceFeatures& setSparseResidency2Samples( Bool32 sparseResidency2Samples_ )
3856 {
3857 sparseResidency2Samples = sparseResidency2Samples_;
3858 return *this;
3859 }
3860
3861 PhysicalDeviceFeatures& setSparseResidency4Samples( Bool32 sparseResidency4Samples_ )
3862 {
3863 sparseResidency4Samples = sparseResidency4Samples_;
3864 return *this;
3865 }
3866
3867 PhysicalDeviceFeatures& setSparseResidency8Samples( Bool32 sparseResidency8Samples_ )
3868 {
3869 sparseResidency8Samples = sparseResidency8Samples_;
3870 return *this;
3871 }
3872
3873 PhysicalDeviceFeatures& setSparseResidency16Samples( Bool32 sparseResidency16Samples_ )
3874 {
3875 sparseResidency16Samples = sparseResidency16Samples_;
3876 return *this;
3877 }
3878
3879 PhysicalDeviceFeatures& setSparseResidencyAliased( Bool32 sparseResidencyAliased_ )
3880 {
3881 sparseResidencyAliased = sparseResidencyAliased_;
3882 return *this;
3883 }
3884
3885 PhysicalDeviceFeatures& setVariableMultisampleRate( Bool32 variableMultisampleRate_ )
3886 {
3887 variableMultisampleRate = variableMultisampleRate_;
3888 return *this;
3889 }
3890
3891 PhysicalDeviceFeatures& setInheritedQueries( Bool32 inheritedQueries_ )
3892 {
3893 inheritedQueries = inheritedQueries_;
3894 return *this;
3895 }
3896
3897 operator const VkPhysicalDeviceFeatures&() const
3898 {
3899 return *reinterpret_cast<const VkPhysicalDeviceFeatures*>(this);
3900 }
3901
3902 bool operator==( PhysicalDeviceFeatures const& rhs ) const
3903 {
3904 return ( robustBufferAccess == rhs.robustBufferAccess )
3905 && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
3906 && ( imageCubeArray == rhs.imageCubeArray )
3907 && ( independentBlend == rhs.independentBlend )
3908 && ( geometryShader == rhs.geometryShader )
3909 && ( tessellationShader == rhs.tessellationShader )
3910 && ( sampleRateShading == rhs.sampleRateShading )
3911 && ( dualSrcBlend == rhs.dualSrcBlend )
3912 && ( logicOp == rhs.logicOp )
3913 && ( multiDrawIndirect == rhs.multiDrawIndirect )
3914 && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
3915 && ( depthClamp == rhs.depthClamp )
3916 && ( depthBiasClamp == rhs.depthBiasClamp )
3917 && ( fillModeNonSolid == rhs.fillModeNonSolid )
3918 && ( depthBounds == rhs.depthBounds )
3919 && ( wideLines == rhs.wideLines )
3920 && ( largePoints == rhs.largePoints )
3921 && ( alphaToOne == rhs.alphaToOne )
3922 && ( multiViewport == rhs.multiViewport )
3923 && ( samplerAnisotropy == rhs.samplerAnisotropy )
3924 && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
3925 && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
3926 && ( textureCompressionBC == rhs.textureCompressionBC )
3927 && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
3928 && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
3929 && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
3930 && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
3931 && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
3932 && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
3933 && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
3934 && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
3935 && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
3936 && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
3937 && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
3938 && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
3939 && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
3940 && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
3941 && ( shaderClipDistance == rhs.shaderClipDistance )
3942 && ( shaderCullDistance == rhs.shaderCullDistance )
3943 && ( shaderFloat64 == rhs.shaderFloat64 )
3944 && ( shaderInt64 == rhs.shaderInt64 )
3945 && ( shaderInt16 == rhs.shaderInt16 )
3946 && ( shaderResourceResidency == rhs.shaderResourceResidency )
3947 && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
3948 && ( sparseBinding == rhs.sparseBinding )
3949 && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
3950 && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
3951 && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
3952 && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
3953 && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
3954 && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
3955 && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
3956 && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
3957 && ( variableMultisampleRate == rhs.variableMultisampleRate )
3958 && ( inheritedQueries == rhs.inheritedQueries );
3959 }
3960
3961 bool operator!=( PhysicalDeviceFeatures const& rhs ) const
3962 {
3963 return !operator==( rhs );
3964 }
3965
3966 Bool32 robustBufferAccess;
3967 Bool32 fullDrawIndexUint32;
3968 Bool32 imageCubeArray;
3969 Bool32 independentBlend;
3970 Bool32 geometryShader;
3971 Bool32 tessellationShader;
3972 Bool32 sampleRateShading;
3973 Bool32 dualSrcBlend;
3974 Bool32 logicOp;
3975 Bool32 multiDrawIndirect;
3976 Bool32 drawIndirectFirstInstance;
3977 Bool32 depthClamp;
3978 Bool32 depthBiasClamp;
3979 Bool32 fillModeNonSolid;
3980 Bool32 depthBounds;
3981 Bool32 wideLines;
3982 Bool32 largePoints;
3983 Bool32 alphaToOne;
3984 Bool32 multiViewport;
3985 Bool32 samplerAnisotropy;
3986 Bool32 textureCompressionETC2;
3987 Bool32 textureCompressionASTC_LDR;
3988 Bool32 textureCompressionBC;
3989 Bool32 occlusionQueryPrecise;
3990 Bool32 pipelineStatisticsQuery;
3991 Bool32 vertexPipelineStoresAndAtomics;
3992 Bool32 fragmentStoresAndAtomics;
3993 Bool32 shaderTessellationAndGeometryPointSize;
3994 Bool32 shaderImageGatherExtended;
3995 Bool32 shaderStorageImageExtendedFormats;
3996 Bool32 shaderStorageImageMultisample;
3997 Bool32 shaderStorageImageReadWithoutFormat;
3998 Bool32 shaderStorageImageWriteWithoutFormat;
3999 Bool32 shaderUniformBufferArrayDynamicIndexing;
4000 Bool32 shaderSampledImageArrayDynamicIndexing;
4001 Bool32 shaderStorageBufferArrayDynamicIndexing;
4002 Bool32 shaderStorageImageArrayDynamicIndexing;
4003 Bool32 shaderClipDistance;
4004 Bool32 shaderCullDistance;
4005 Bool32 shaderFloat64;
4006 Bool32 shaderInt64;
4007 Bool32 shaderInt16;
4008 Bool32 shaderResourceResidency;
4009 Bool32 shaderResourceMinLod;
4010 Bool32 sparseBinding;
4011 Bool32 sparseResidencyBuffer;
4012 Bool32 sparseResidencyImage2D;
4013 Bool32 sparseResidencyImage3D;
4014 Bool32 sparseResidency2Samples;
4015 Bool32 sparseResidency4Samples;
4016 Bool32 sparseResidency8Samples;
4017 Bool32 sparseResidency16Samples;
4018 Bool32 sparseResidencyAliased;
4019 Bool32 variableMultisampleRate;
4020 Bool32 inheritedQueries;
4021 };
4022 static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
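  // Usage sketch (not part of the generated header): the fluent setters above return *this, so
  // feature requests can be chained, and the implicit conversion operator yields the C struct view.
  // The enclosing namespace is assumed to be vk.
  //
  //   vk::PhysicalDeviceFeatures enabledFeatures = vk::PhysicalDeviceFeatures()
  //     .setVariableMultisampleRate( VK_TRUE )
  //     .setInheritedQueries( VK_TRUE );
  //   const VkPhysicalDeviceFeatures & cFeatures = enabledFeatures;  // reinterpret-cast view of the same bits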
4023
4024 struct PhysicalDeviceSparseProperties
4025 {
4026 operator const VkPhysicalDeviceSparseProperties&() const
4027 {
4028 return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>(this);
4029 }
4030
4031 bool operator==( PhysicalDeviceSparseProperties const& rhs ) const
4032 {
4033 return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
4034 && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
4035 && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
4036 && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
4037 && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
4038 }
4039
4040 bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const
4041 {
4042 return !operator==( rhs );
4043 }
4044
4045 Bool32 residencyStandard2DBlockShape;
4046 Bool32 residencyStandard2DMultisampleBlockShape;
4047 Bool32 residencyStandard3DBlockShape;
4048 Bool32 residencyAlignedMipSize;
4049 Bool32 residencyNonResidentStrict;
4050 };
4051 static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
4052
4053 struct DrawIndirectCommand
4054 {
4055 DrawIndirectCommand( uint32_t vertexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstVertex_ = 0, uint32_t firstInstance_ = 0 )
4056 : vertexCount( vertexCount_ )
4057 , instanceCount( instanceCount_ )
4058 , firstVertex( firstVertex_ )
4059 , firstInstance( firstInstance_ )
4060 {
4061 }
4062
4063 DrawIndirectCommand( VkDrawIndirectCommand const & rhs )
4064 {
4065 memcpy( this, &rhs, sizeof(DrawIndirectCommand) );
4066 }
4067
4068 DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs )
4069 {
4070 memcpy( this, &rhs, sizeof(DrawIndirectCommand) );
4071 return *this;
4072 }
4073
4074 DrawIndirectCommand& setVertexCount( uint32_t vertexCount_ )
4075 {
4076 vertexCount = vertexCount_;
4077 return *this;
4078 }
4079
4080 DrawIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
4081 {
4082 instanceCount = instanceCount_;
4083 return *this;
4084 }
4085
4086 DrawIndirectCommand& setFirstVertex( uint32_t firstVertex_ )
4087 {
4088 firstVertex = firstVertex_;
4089 return *this;
4090 }
4091
4092 DrawIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
4093 {
4094 firstInstance = firstInstance_;
4095 return *this;
4096 }
4097
4098 operator const VkDrawIndirectCommand&() const
4099 {
4100 return *reinterpret_cast<const VkDrawIndirectCommand*>(this);
4101 }
4102
4103 bool operator==( DrawIndirectCommand const& rhs ) const
4104 {
4105 return ( vertexCount == rhs.vertexCount )
4106 && ( instanceCount == rhs.instanceCount )
4107 && ( firstVertex == rhs.firstVertex )
4108 && ( firstInstance == rhs.firstInstance );
4109 }
4110
4111 bool operator!=( DrawIndirectCommand const& rhs ) const
4112 {
4113 return !operator==( rhs );
4114 }
4115
4116 uint32_t vertexCount;
4117 uint32_t instanceCount;
4118 uint32_t firstVertex;
4119 uint32_t firstInstance;
4120 };
4121 static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
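  // Usage sketch (illustrative): the static_assert above guarantees layout compatibility with
  // VkDrawIndirectCommand, so an instance can be staged in a buffer read by vkCmdDrawIndirect.
  // The mappedIndirectBuffer pointer is assumed to come from an application-side memory mapping.
  //
  //   vk::DrawIndirectCommand cmd( 3, 1, 0, 0 );   // vertexCount, instanceCount, firstVertex, firstInstance
  //   const VkDrawIndirectCommand & raw = cmd;     // layout-compatible view
  //   memcpy( mappedIndirectBuffer, &raw, sizeof( raw ) );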
4122
4123 struct DrawIndexedIndirectCommand
4124 {
4125 DrawIndexedIndirectCommand( uint32_t indexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstIndex_ = 0, int32_t vertexOffset_ = 0, uint32_t firstInstance_ = 0 )
4126 : indexCount( indexCount_ )
4127 , instanceCount( instanceCount_ )
4128 , firstIndex( firstIndex_ )
4129 , vertexOffset( vertexOffset_ )
4130 , firstInstance( firstInstance_ )
4131 {
4132 }
4133
4134 DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs )
4135 {
4136 memcpy( this, &rhs, sizeof(DrawIndexedIndirectCommand) );
4137 }
4138
4139 DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs )
4140 {
4141 memcpy( this, &rhs, sizeof(DrawIndexedIndirectCommand) );
4142 return *this;
4143 }
4144
4145 DrawIndexedIndirectCommand& setIndexCount( uint32_t indexCount_ )
4146 {
4147 indexCount = indexCount_;
4148 return *this;
4149 }
4150
4151 DrawIndexedIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
4152 {
4153 instanceCount = instanceCount_;
4154 return *this;
4155 }
4156
4157 DrawIndexedIndirectCommand& setFirstIndex( uint32_t firstIndex_ )
4158 {
4159 firstIndex = firstIndex_;
4160 return *this;
4161 }
4162
4163 DrawIndexedIndirectCommand& setVertexOffset( int32_t vertexOffset_ )
4164 {
4165 vertexOffset = vertexOffset_;
4166 return *this;
4167 }
4168
4169 DrawIndexedIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
4170 {
4171 firstInstance = firstInstance_;
4172 return *this;
4173 }
4174
4175 operator const VkDrawIndexedIndirectCommand&() const
4176 {
4177 return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>(this);
4178 }
4179
4180 bool operator==( DrawIndexedIndirectCommand const& rhs ) const
4181 {
4182 return ( indexCount == rhs.indexCount )
4183 && ( instanceCount == rhs.instanceCount )
4184 && ( firstIndex == rhs.firstIndex )
4185 && ( vertexOffset == rhs.vertexOffset )
4186 && ( firstInstance == rhs.firstInstance );
4187 }
4188
4189 bool operator!=( DrawIndexedIndirectCommand const& rhs ) const
4190 {
4191 return !operator==( rhs );
4192 }
4193
4194 uint32_t indexCount;
4195 uint32_t instanceCount;
4196 uint32_t firstIndex;
4197 int32_t vertexOffset;
4198 uint32_t firstInstance;
4199 };
4200 static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
4201
4202 struct DispatchIndirectCommand
4203 {
4204 DispatchIndirectCommand( uint32_t x_ = 0, uint32_t y_ = 0, uint32_t z_ = 0 )
4205 : x( x_ )
4206 , y( y_ )
4207 , z( z_ )
4208 {
4209 }
4210
4211 DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs )
4212 {
4213 memcpy( this, &rhs, sizeof(DispatchIndirectCommand) );
4214 }
4215
4216 DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs )
4217 {
4218 memcpy( this, &rhs, sizeof(DispatchIndirectCommand) );
4219 return *this;
4220 }
4221
4222 DispatchIndirectCommand& setX( uint32_t x_ )
4223 {
4224 x = x_;
4225 return *this;
4226 }
4227
4228 DispatchIndirectCommand& setY( uint32_t y_ )
4229 {
4230 y = y_;
4231 return *this;
4232 }
4233
4234 DispatchIndirectCommand& setZ( uint32_t z_ )
4235 {
4236 z = z_;
4237 return *this;
4238 }
4239
4240 operator const VkDispatchIndirectCommand&() const
4241 {
4242 return *reinterpret_cast<const VkDispatchIndirectCommand*>(this);
4243 }
4244
4245 bool operator==( DispatchIndirectCommand const& rhs ) const
4246 {
4247 return ( x == rhs.x )
4248 && ( y == rhs.y )
4249 && ( z == rhs.z );
4250 }
4251
4252 bool operator!=( DispatchIndirectCommand const& rhs ) const
4253 {
4254 return !operator==( rhs );
4255 }
4256
4257 uint32_t x;
4258 uint32_t y;
4259 uint32_t z;
4260 };
4261 static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
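  // Usage sketch (illustrative): like the draw variants above, this mirrors the layout that
  // vkCmdDispatchIndirect reads from a buffer, so a workgroup count can be staged as:
  //
  //   vk::DispatchIndirectCommand dispatch( 64, 1, 1 );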
4262
4263 struct DisplayPlanePropertiesKHR
4264 {
4265 operator const VkDisplayPlanePropertiesKHR&() const
4266 {
4267 return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>(this);
4268 }
4269
4270 bool operator==( DisplayPlanePropertiesKHR const& rhs ) const
4271 {
4272 return ( currentDisplay == rhs.currentDisplay )
4273 && ( currentStackIndex == rhs.currentStackIndex );
4274 }
4275
4276 bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const
4277 {
4278 return !operator==( rhs );
4279 }
4280
4281 DisplayKHR currentDisplay;
4282 uint32_t currentStackIndex;
4283 };
4284 static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
4285
4286 struct DisplayModeParametersKHR
4287 {
4288 DisplayModeParametersKHR( Extent2D visibleRegion_ = Extent2D(), uint32_t refreshRate_ = 0 )
4289 : visibleRegion( visibleRegion_ )
4290 , refreshRate( refreshRate_ )
4291 {
4292 }
4293
4294 DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs )
4295 {
4296 memcpy( this, &rhs, sizeof(DisplayModeParametersKHR) );
4297 }
4298
4299 DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs )
4300 {
4301 memcpy( this, &rhs, sizeof(DisplayModeParametersKHR) );
4302 return *this;
4303 }
4304
4305 DisplayModeParametersKHR& setVisibleRegion( Extent2D visibleRegion_ )
4306 {
4307 visibleRegion = visibleRegion_;
4308 return *this;
4309 }
4310
4311 DisplayModeParametersKHR& setRefreshRate( uint32_t refreshRate_ )
4312 {
4313 refreshRate = refreshRate_;
4314 return *this;
4315 }
4316
4317 operator const VkDisplayModeParametersKHR&() const
4318 {
4319 return *reinterpret_cast<const VkDisplayModeParametersKHR*>(this);
4320 }
4321
4322 bool operator==( DisplayModeParametersKHR const& rhs ) const
4323 {
4324 return ( visibleRegion == rhs.visibleRegion )
4325 && ( refreshRate == rhs.refreshRate );
4326 }
4327
4328 bool operator!=( DisplayModeParametersKHR const& rhs ) const
4329 {
4330 return !operator==( rhs );
4331 }
4332
4333 Extent2D visibleRegion;
4334 uint32_t refreshRate;
4335 };
4336 static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
4337
4338 struct DisplayModePropertiesKHR
4339 {
4340 operator const VkDisplayModePropertiesKHR&() const
4341 {
4342 return *reinterpret_cast<const VkDisplayModePropertiesKHR*>(this);
4343 }
4344
4345 bool operator==( DisplayModePropertiesKHR const& rhs ) const
4346 {
4347 return ( displayMode == rhs.displayMode )
4348 && ( parameters == rhs.parameters );
4349 }
4350
4351 bool operator!=( DisplayModePropertiesKHR const& rhs ) const
4352 {
4353 return !operator==( rhs );
4354 }
4355
4356 DisplayModeKHR displayMode;
4357 DisplayModeParametersKHR parameters;
4358 };
4359 static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
4360
4361 enum class ImageLayout
4362 {
4363 eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
4364 eGeneral = VK_IMAGE_LAYOUT_GENERAL,
4365 eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
4366 eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
4367 eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
4368 eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
4369 eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4370 eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4371 ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
4372 ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
4373 };
4374
4375 struct DescriptorImageInfo
4376 {
4377 DescriptorImageInfo( Sampler sampler_ = Sampler(), ImageView imageView_ = ImageView(), ImageLayout imageLayout_ = ImageLayout::eUndefined )
4378 : sampler( sampler_ )
4379 , imageView( imageView_ )
4380 , imageLayout( imageLayout_ )
4381 {
4382 }
4383
4384 DescriptorImageInfo( VkDescriptorImageInfo const & rhs )
4385 {
4386 memcpy( this, &rhs, sizeof(DescriptorImageInfo) );
4387 }
4388
4389 DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs )
4390 {
4391 memcpy( this, &rhs, sizeof(DescriptorImageInfo) );
4392 return *this;
4393 }
4394
4395 DescriptorImageInfo& setSampler( Sampler sampler_ )
4396 {
4397 sampler = sampler_;
4398 return *this;
4399 }
4400
4401 DescriptorImageInfo& setImageView( ImageView imageView_ )
4402 {
4403 imageView = imageView_;
4404 return *this;
4405 }
4406
4407 DescriptorImageInfo& setImageLayout( ImageLayout imageLayout_ )
4408 {
4409 imageLayout = imageLayout_;
4410 return *this;
4411 }
4412
4413 operator const VkDescriptorImageInfo&() const
4414 {
4415 return *reinterpret_cast<const VkDescriptorImageInfo*>(this);
4416 }
4417
4418 bool operator==( DescriptorImageInfo const& rhs ) const
4419 {
4420 return ( sampler == rhs.sampler )
4421 && ( imageView == rhs.imageView )
4422 && ( imageLayout == rhs.imageLayout );
4423 }
4424
4425 bool operator!=( DescriptorImageInfo const& rhs ) const
4426 {
4427 return !operator==( rhs );
4428 }
4429
4430 Sampler sampler;
4431 ImageView imageView;
4432 ImageLayout imageLayout;
4433 };
4434 static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
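  // Usage sketch (illustrative): a combined image sampler descriptor typically pairs a sampler and
  // an image view in eShaderReadOnlyOptimal layout. The sampler and imageView handles are assumed
  // to have been created elsewhere by the application.
  //
  //   vk::DescriptorImageInfo imageInfo = vk::DescriptorImageInfo()
  //     .setSampler( sampler )
  //     .setImageView( imageView )
  //     .setImageLayout( vk::ImageLayout::eShaderReadOnlyOptimal );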
4435
4436 struct AttachmentReference
4437 {
4438 AttachmentReference( uint32_t attachment_ = 0, ImageLayout layout_ = ImageLayout::eUndefined )
4439 : attachment( attachment_ )
4440 , layout( layout_ )
4441 {
4442 }
4443
4444 AttachmentReference( VkAttachmentReference const & rhs )
4445 {
4446 memcpy( this, &rhs, sizeof(AttachmentReference) );
4447 }
4448
4449 AttachmentReference& operator=( VkAttachmentReference const & rhs )
4450 {
4451 memcpy( this, &rhs, sizeof(AttachmentReference) );
4452 return *this;
4453 }
4454
4455 AttachmentReference& setAttachment( uint32_t attachment_ )
4456 {
4457 attachment = attachment_;
4458 return *this;
4459 }
4460
4461 AttachmentReference& setLayout( ImageLayout layout_ )
4462 {
4463 layout = layout_;
4464 return *this;
4465 }
4466
4467 operator const VkAttachmentReference&() const
4468 {
4469 return *reinterpret_cast<const VkAttachmentReference*>(this);
4470 }
4471
4472 bool operator==( AttachmentReference const& rhs ) const
4473 {
4474 return ( attachment == rhs.attachment )
4475 && ( layout == rhs.layout );
4476 }
4477
4478 bool operator!=( AttachmentReference const& rhs ) const
4479 {
4480 return !operator==( rhs );
4481 }
4482
4483 uint32_t attachment;
4484 ImageLayout layout;
4485 };
4486 static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
4487
4488 enum class AttachmentLoadOp
4489 {
4490 eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
4491 eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
4492 eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE
4493 };
4494
4495 enum class AttachmentStoreOp
4496 {
4497 eStore = VK_ATTACHMENT_STORE_OP_STORE,
4498 eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE
4499 };
4500
4501 enum class ImageType
4502 {
4503 e1D = VK_IMAGE_TYPE_1D,
4504 e2D = VK_IMAGE_TYPE_2D,
4505 e3D = VK_IMAGE_TYPE_3D
4506 };
4507
4508 enum class ImageTiling
4509 {
4510 eOptimal = VK_IMAGE_TILING_OPTIMAL,
4511 eLinear = VK_IMAGE_TILING_LINEAR
4512 };
4513
4514 enum class ImageViewType
4515 {
4516 e1D = VK_IMAGE_VIEW_TYPE_1D,
4517 e2D = VK_IMAGE_VIEW_TYPE_2D,
4518 e3D = VK_IMAGE_VIEW_TYPE_3D,
4519 eCube = VK_IMAGE_VIEW_TYPE_CUBE,
4520 e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
4521 e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
4522 eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
4523 };
4524
4525 enum class CommandBufferLevel
4526 {
4527 ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
4528 eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
4529 };
4530
4531 enum class ComponentSwizzle
4532 {
4533 eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
4534 eZero = VK_COMPONENT_SWIZZLE_ZERO,
4535 eOne = VK_COMPONENT_SWIZZLE_ONE,
4536 eR = VK_COMPONENT_SWIZZLE_R,
4537 eG = VK_COMPONENT_SWIZZLE_G,
4538 eB = VK_COMPONENT_SWIZZLE_B,
4539 eA = VK_COMPONENT_SWIZZLE_A
4540 };
4541
4542 struct ComponentMapping
4543 {
4544 ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity, ComponentSwizzle g_ = ComponentSwizzle::eIdentity, ComponentSwizzle b_ = ComponentSwizzle::eIdentity, ComponentSwizzle a_ = ComponentSwizzle::eIdentity )
4545 : r( r_ )
4546 , g( g_ )
4547 , b( b_ )
4548 , a( a_ )
4549 {
4550 }
4551
4552 ComponentMapping( VkComponentMapping const & rhs )
4553 {
4554 memcpy( this, &rhs, sizeof(ComponentMapping) );
4555 }
4556
4557 ComponentMapping& operator=( VkComponentMapping const & rhs )
4558 {
4559 memcpy( this, &rhs, sizeof(ComponentMapping) );
4560 return *this;
4561 }
4562
4563 ComponentMapping& setR( ComponentSwizzle r_ )
4564 {
4565 r = r_;
4566 return *this;
4567 }
4568
4569 ComponentMapping& setG( ComponentSwizzle g_ )
4570 {
4571 g = g_;
4572 return *this;
4573 }
4574
4575 ComponentMapping& setB( ComponentSwizzle b_ )
4576 {
4577 b = b_;
4578 return *this;
4579 }
4580
4581 ComponentMapping& setA( ComponentSwizzle a_ )
4582 {
4583 a = a_;
4584 return *this;
4585 }
4586
4587 operator const VkComponentMapping&() const
4588 {
4589 return *reinterpret_cast<const VkComponentMapping*>(this);
4590 }
4591
4592 bool operator==( ComponentMapping const& rhs ) const
4593 {
4594 return ( r == rhs.r )
4595 && ( g == rhs.g )
4596 && ( b == rhs.b )
4597 && ( a == rhs.a );
4598 }
4599
4600 bool operator!=( ComponentMapping const& rhs ) const
4601 {
4602 return !operator==( rhs );
4603 }
4604
4605 ComponentSwizzle r;
4606 ComponentSwizzle g;
4607 ComponentSwizzle b;
4608 ComponentSwizzle a;
4609 };
4610 static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
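  // Usage sketch (illustrative): ComponentSwizzle::eIdentity is the default for every channel, so an
  // explicit mapping is only needed to reorder or force channels, e.g. viewing a single-channel image
  // as opaque grayscale:
  //
  //   vk::ComponentMapping mapping( vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eR,
  //                                 vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eOne );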
4611
4612 enum class DescriptorType
4613 {
4614 eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
4615 eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
4616 eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
4617 eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
4618 eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
4619 eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
4620 eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
4621 eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
4622 eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
4623 eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
4624 eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
4625 };
4626
4627 struct DescriptorPoolSize
4628 {
4629 DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0 )
4630 : type( type_ )
4631 , descriptorCount( descriptorCount_ )
4632 {
4633 }
4634
4635 DescriptorPoolSize( VkDescriptorPoolSize const & rhs )
4636 {
4637 memcpy( this, &rhs, sizeof(DescriptorPoolSize) );
4638 }
4639
4640 DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs )
4641 {
4642 memcpy( this, &rhs, sizeof(DescriptorPoolSize) );
4643 return *this;
4644 }
4645
4646 DescriptorPoolSize& setType( DescriptorType type_ )
4647 {
4648 type = type_;
4649 return *this;
4650 }
4651
4652 DescriptorPoolSize& setDescriptorCount( uint32_t descriptorCount_ )
4653 {
4654 descriptorCount = descriptorCount_;
4655 return *this;
4656 }
4657
4658 operator const VkDescriptorPoolSize&() const
4659 {
4660 return *reinterpret_cast<const VkDescriptorPoolSize*>(this);
4661 }
4662
4663 bool operator==( DescriptorPoolSize const& rhs ) const
4664 {
4665 return ( type == rhs.type )
4666 && ( descriptorCount == rhs.descriptorCount );
4667 }
4668
4669 bool operator!=( DescriptorPoolSize const& rhs ) const
4670 {
4671 return !operator==( rhs );
4672 }
4673
4674 DescriptorType type;
4675 uint32_t descriptorCount;
4676 };
4677 static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
4678
4679 enum class QueryType
4680 {
4681 eOcclusion = VK_QUERY_TYPE_OCCLUSION,
4682 ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
4683 eTimestamp = VK_QUERY_TYPE_TIMESTAMP
4684 };
4685
4686 enum class BorderColor
4687 {
4688 eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
4689 eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
4690 eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
4691 eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
4692 eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
4693 eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
4694 };
4695
4696 enum class PipelineBindPoint
4697 {
4698 eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
4699 eCompute = VK_PIPELINE_BIND_POINT_COMPUTE
4700 };
4701
4702 struct SubpassDescription
4703 {
4704 SubpassDescription( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = 0, const AttachmentReference* pInputAttachments_ = nullptr, uint32_t colorAttachmentCount_ = 0, const AttachmentReference* pColorAttachments_ = nullptr, const AttachmentReference* pResolveAttachments_ = nullptr, const AttachmentReference* pDepthStencilAttachment_ = nullptr, uint32_t preserveAttachmentCount_ = 0, const uint32_t* pPreserveAttachments_ = nullptr )
4705 : flags( flags_ )
4706 , pipelineBindPoint( pipelineBindPoint_ )
4707 , inputAttachmentCount( inputAttachmentCount_ )
4708 , pInputAttachments( pInputAttachments_ )
4709 , colorAttachmentCount( colorAttachmentCount_ )
4710 , pColorAttachments( pColorAttachments_ )
4711 , pResolveAttachments( pResolveAttachments_ )
4712 , pDepthStencilAttachment( pDepthStencilAttachment_ )
4713 , preserveAttachmentCount( preserveAttachmentCount_ )
4714 , pPreserveAttachments( pPreserveAttachments_ )
4715 {
4716 }
4717
4718 SubpassDescription( VkSubpassDescription const & rhs )
4719 {
4720 memcpy( this, &rhs, sizeof(SubpassDescription) );
4721 }
4722
4723 SubpassDescription& operator=( VkSubpassDescription const & rhs )
4724 {
4725 memcpy( this, &rhs, sizeof(SubpassDescription) );
4726 return *this;
4727 }
4728
4729 SubpassDescription& setFlags( SubpassDescriptionFlags flags_ )
4730 {
4731 flags = flags_;
4732 return *this;
4733 }
4734
4735 SubpassDescription& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
4736 {
4737 pipelineBindPoint = pipelineBindPoint_;
4738 return *this;
4739 }
4740
4741 SubpassDescription& setInputAttachmentCount( uint32_t inputAttachmentCount_ )
4742 {
4743 inputAttachmentCount = inputAttachmentCount_;
4744 return *this;
4745 }
4746
4747 SubpassDescription& setPInputAttachments( const AttachmentReference* pInputAttachments_ )
4748 {
4749 pInputAttachments = pInputAttachments_;
4750 return *this;
4751 }
4752
4753 SubpassDescription& setColorAttachmentCount( uint32_t colorAttachmentCount_ )
4754 {
4755 colorAttachmentCount = colorAttachmentCount_;
4756 return *this;
4757 }
4758
4759 SubpassDescription& setPColorAttachments( const AttachmentReference* pColorAttachments_ )
4760 {
4761 pColorAttachments = pColorAttachments_;
4762 return *this;
4763 }
4764
4765 SubpassDescription& setPResolveAttachments( const AttachmentReference* pResolveAttachments_ )
4766 {
4767 pResolveAttachments = pResolveAttachments_;
4768 return *this;
4769 }
4770
4771 SubpassDescription& setPDepthStencilAttachment( const AttachmentReference* pDepthStencilAttachment_ )
4772 {
4773 pDepthStencilAttachment = pDepthStencilAttachment_;
4774 return *this;
4775 }
4776
4777 SubpassDescription& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ )
4778 {
4779 preserveAttachmentCount = preserveAttachmentCount_;
4780 return *this;
4781 }
4782
4783 SubpassDescription& setPPreserveAttachments( const uint32_t* pPreserveAttachments_ )
4784 {
4785 pPreserveAttachments = pPreserveAttachments_;
4786 return *this;
4787 }
4788
4789 operator const VkSubpassDescription&() const
4790 {
4791 return *reinterpret_cast<const VkSubpassDescription*>(this);
4792 }
4793
4794 bool operator==( SubpassDescription const& rhs ) const
4795 {
4796 return ( flags == rhs.flags )
4797 && ( pipelineBindPoint == rhs.pipelineBindPoint )
4798 && ( inputAttachmentCount == rhs.inputAttachmentCount )
4799 && ( pInputAttachments == rhs.pInputAttachments )
4800 && ( colorAttachmentCount == rhs.colorAttachmentCount )
4801 && ( pColorAttachments == rhs.pColorAttachments )
4802 && ( pResolveAttachments == rhs.pResolveAttachments )
4803 && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
4804 && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
4805 && ( pPreserveAttachments == rhs.pPreserveAttachments );
4806 }
4807
4808 bool operator!=( SubpassDescription const& rhs ) const
4809 {
4810 return !operator==( rhs );
4811 }
4812
4813 SubpassDescriptionFlags flags;
4814 PipelineBindPoint pipelineBindPoint;
4815 uint32_t inputAttachmentCount;
4816 const AttachmentReference* pInputAttachments;
4817 uint32_t colorAttachmentCount;
4818 const AttachmentReference* pColorAttachments;
4819 const AttachmentReference* pResolveAttachments;
4820 const AttachmentReference* pDepthStencilAttachment;
4821 uint32_t preserveAttachmentCount;
4822 const uint32_t* pPreserveAttachments;
4823 };
4824 static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
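  // Usage sketch (illustrative): a minimal graphics subpass with one color attachment. The
  // AttachmentReference is assumed to index attachment 0 of the enclosing render pass.
  //
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription subpass = vk::SubpassDescription()
  //     .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //     .setColorAttachmentCount( 1 )
  //     .setPColorAttachments( &colorRef );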
4825
4826 enum class PipelineCacheHeaderVersion
4827 {
4828 eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
4829 };
4830
4831 enum class PrimitiveTopology
4832 {
4833 ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
4834 eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
4835 eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
4836 eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
4837 eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
4838 eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
4839 eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
4840 eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
4841 eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
4842 eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
4843 ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
4844 };
4845
4846 enum class SharingMode
4847 {
4848 eExclusive = VK_SHARING_MODE_EXCLUSIVE,
4849 eConcurrent = VK_SHARING_MODE_CONCURRENT
4850 };
4851
4852 enum class IndexType
4853 {
4854 eUint16 = VK_INDEX_TYPE_UINT16,
4855 eUint32 = VK_INDEX_TYPE_UINT32
4856 };
4857
4858 enum class Filter
4859 {
4860 eNearest = VK_FILTER_NEAREST,
4861 eLinear = VK_FILTER_LINEAR,
4862 eCubicIMG = VK_FILTER_CUBIC_IMG
4863 };
4864
4865 enum class SamplerMipmapMode
4866 {
4867 eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
4868 eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
4869 };
4870
4871 enum class SamplerAddressMode
4872 {
4873 eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
4874 eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
4875 eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
4876 eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
4877 eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
4878 };
4879
4880 enum class CompareOp
4881 {
4882 eNever = VK_COMPARE_OP_NEVER,
4883 eLess = VK_COMPARE_OP_LESS,
4884 eEqual = VK_COMPARE_OP_EQUAL,
4885 eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
4886 eGreater = VK_COMPARE_OP_GREATER,
4887 eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
4888 eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
4889 eAlways = VK_COMPARE_OP_ALWAYS
4890 };
4891
4892 enum class PolygonMode
4893 {
4894 eFill = VK_POLYGON_MODE_FILL,
4895 eLine = VK_POLYGON_MODE_LINE,
4896 ePoint = VK_POLYGON_MODE_POINT
4897 };
4898
4899 enum class CullModeFlagBits
4900 {
4901 eNone = VK_CULL_MODE_NONE,
4902 eFront = VK_CULL_MODE_FRONT_BIT,
4903 eBack = VK_CULL_MODE_BACK_BIT,
4904 eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
4905 };
4906
4907 using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
4908
4909 VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
4910 {
4911 return CullModeFlags( bit0 ) | bit1;
4912 }
4913
4914 VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
4915 {
4916 return ~( CullModeFlags( bits ) );
4917 }
4918
4919 template <> struct FlagTraits<CullModeFlagBits>
4920 {
4921 enum
4922 {
4923 allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
4924 };
4925 };
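  // Usage sketch (illustrative): the operator| defined above combines individual CullModeFlagBits
  // into a CullModeFlags value, e.g. culling both faces explicitly:
  //
  //   vk::CullModeFlags cullMode = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;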
4926
4927 enum class FrontFace
4928 {
4929 eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
4930 eClockwise = VK_FRONT_FACE_CLOCKWISE
4931 };
4932
4933 enum class BlendFactor
4934 {
4935 eZero = VK_BLEND_FACTOR_ZERO,
4936 eOne = VK_BLEND_FACTOR_ONE,
4937 eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
4938 eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
4939 eDstColor = VK_BLEND_FACTOR_DST_COLOR,
4940 eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
4941 eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
4942 eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
4943 eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
4944 eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
4945 eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
4946 eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
4947 eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
4948 eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
4949 eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
4950 eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
4951 eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
4952 eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
4953 eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
4954 };
4955
4956 enum class BlendOp
4957 {
4958 eAdd = VK_BLEND_OP_ADD,
4959 eSubtract = VK_BLEND_OP_SUBTRACT,
4960 eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
4961 eMin = VK_BLEND_OP_MIN,
4962 eMax = VK_BLEND_OP_MAX
4963 };
4964
4965 enum class StencilOp
4966 {
4967 eKeep = VK_STENCIL_OP_KEEP,
4968 eZero = VK_STENCIL_OP_ZERO,
4969 eReplace = VK_STENCIL_OP_REPLACE,
4970 eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
4971 eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
4972 eInvert = VK_STENCIL_OP_INVERT,
4973 eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
4974 eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
4975 };
4976
4977 struct StencilOpState
4978 {
4979 StencilOpState( StencilOp failOp_ = StencilOp::eKeep, StencilOp passOp_ = StencilOp::eKeep, StencilOp depthFailOp_ = StencilOp::eKeep, CompareOp compareOp_ = CompareOp::eNever, uint32_t compareMask_ = 0, uint32_t writeMask_ = 0, uint32_t reference_ = 0 )
4980 : failOp( failOp_ )
4981 , passOp( passOp_ )
4982 , depthFailOp( depthFailOp_ )
4983 , compareOp( compareOp_ )
4984 , compareMask( compareMask_ )
4985 , writeMask( writeMask_ )
4986 , reference( reference_ )
4987 {
4988 }
4989
4990 StencilOpState( VkStencilOpState const & rhs )
4991 {
4992 memcpy( this, &rhs, sizeof(StencilOpState) );
4993 }
4994
4995 StencilOpState& operator=( VkStencilOpState const & rhs )
4996 {
4997 memcpy( this, &rhs, sizeof(StencilOpState) );
4998 return *this;
4999 }
5000
5001 StencilOpState& setFailOp( StencilOp failOp_ )
5002 {
5003 failOp = failOp_;
5004 return *this;
5005 }
5006
5007 StencilOpState& setPassOp( StencilOp passOp_ )
5008 {
5009 passOp = passOp_;
5010 return *this;
5011 }
5012
5013 StencilOpState& setDepthFailOp( StencilOp depthFailOp_ )
5014 {
5015 depthFailOp = depthFailOp_;
5016 return *this;
5017 }
5018
5019 StencilOpState& setCompareOp( CompareOp compareOp_ )
5020 {
5021 compareOp = compareOp_;
5022 return *this;
5023 }
5024
5025 StencilOpState& setCompareMask( uint32_t compareMask_ )
5026 {
5027 compareMask = compareMask_;
5028 return *this;
5029 }
5030
5031 StencilOpState& setWriteMask( uint32_t writeMask_ )
5032 {
5033 writeMask = writeMask_;
5034 return *this;
5035 }
5036
5037 StencilOpState& setReference( uint32_t reference_ )
5038 {
5039 reference = reference_;
5040 return *this;
5041 }
5042
5043 operator const VkStencilOpState&() const
5044 {
5045 return *reinterpret_cast<const VkStencilOpState*>(this);
5046 }
5047
5048 bool operator==( StencilOpState const& rhs ) const
5049 {
5050 return ( failOp == rhs.failOp )
5051 && ( passOp == rhs.passOp )
5052 && ( depthFailOp == rhs.depthFailOp )
5053 && ( compareOp == rhs.compareOp )
5054 && ( compareMask == rhs.compareMask )
5055 && ( writeMask == rhs.writeMask )
5056 && ( reference == rhs.reference );
5057 }
5058
5059 bool operator!=( StencilOpState const& rhs ) const
5060 {
5061 return !operator==( rhs );
5062 }
5063
5064 StencilOp failOp;
5065 StencilOp passOp;
5066 StencilOp depthFailOp;
5067 CompareOp compareOp;
5068 uint32_t compareMask;
5069 uint32_t writeMask;
5070 uint32_t reference;
5071 };
5072 static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
5073
5074 enum class LogicOp
5075 {
5076 eClear = VK_LOGIC_OP_CLEAR,
5077 eAnd = VK_LOGIC_OP_AND,
5078 eAndReverse = VK_LOGIC_OP_AND_REVERSE,
5079 eCopy = VK_LOGIC_OP_COPY,
5080 eAndInverted = VK_LOGIC_OP_AND_INVERTED,
5081 eNoOp = VK_LOGIC_OP_NO_OP,
5082 eXor = VK_LOGIC_OP_XOR,
5083 eOr = VK_LOGIC_OP_OR,
5084 eNor = VK_LOGIC_OP_NOR,
5085 eEquivalent = VK_LOGIC_OP_EQUIVALENT,
5086 eInvert = VK_LOGIC_OP_INVERT,
5087 eOrReverse = VK_LOGIC_OP_OR_REVERSE,
5088 eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
5089 eOrInverted = VK_LOGIC_OP_OR_INVERTED,
5090 eNand = VK_LOGIC_OP_NAND,
5091 eSet = VK_LOGIC_OP_SET
5092 };
5093
5094 enum class InternalAllocationType
5095 {
5096 eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
5097 };
5098
5099 enum class SystemAllocationScope
5100 {
5101 eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
5102 eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
5103 eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
5104 eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
5105 eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
5106 };
5107
5108 enum class PhysicalDeviceType
5109 {
5110 eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
5111 eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
5112 eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
5113 eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
5114 eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
5115 };
5116
5117 enum class VertexInputRate
5118 {
5119 eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
5120 eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
5121 };
5122
5123 struct VertexInputBindingDescription
5124 {
5125 VertexInputBindingDescription( uint32_t binding_ = 0, uint32_t stride_ = 0, VertexInputRate inputRate_ = VertexInputRate::eVertex )
5126 : binding( binding_ )
5127 , stride( stride_ )
5128 , inputRate( inputRate_ )
5129 {
5130 }
5131
5132 VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs )
5133 {
5134 memcpy( this, &rhs, sizeof(VertexInputBindingDescription) );
5135 }
5136
5137 VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs )
5138 {
5139 memcpy( this, &rhs, sizeof(VertexInputBindingDescription) );
5140 return *this;
5141 }
5142
5143 VertexInputBindingDescription& setBinding( uint32_t binding_ )
5144 {
5145 binding = binding_;
5146 return *this;
5147 }
5148
5149 VertexInputBindingDescription& setStride( uint32_t stride_ )
5150 {
5151 stride = stride_;
5152 return *this;
5153 }
5154
5155 VertexInputBindingDescription& setInputRate( VertexInputRate inputRate_ )
5156 {
5157 inputRate = inputRate_;
5158 return *this;
5159 }
5160
5161 operator const VkVertexInputBindingDescription&() const
5162 {
5163 return *reinterpret_cast<const VkVertexInputBindingDescription*>(this);
5164 }
5165
5166 bool operator==( VertexInputBindingDescription const& rhs ) const
5167 {
5168 return ( binding == rhs.binding )
5169 && ( stride == rhs.stride )
5170 && ( inputRate == rhs.inputRate );
5171 }
5172
5173 bool operator!=( VertexInputBindingDescription const& rhs ) const
5174 {
5175 return !operator==( rhs );
5176 }
5177
5178 uint32_t binding;
5179 uint32_t stride;
5180 VertexInputRate inputRate;
5181 };
5182 static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
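  // Usage sketch (illustrative): one per-vertex binding whose stride matches an application-side
  // Vertex struct (the Vertex type is assumed and is not part of this header):
  //
  //   vk::VertexInputBindingDescription binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex );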
5183
5184 enum class Format
5185 {
5186 eUndefined = VK_FORMAT_UNDEFINED,
5187 eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
5188 eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
5189 eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
5190 eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
5191 eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
5192 eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
5193 eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
5194 eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
5195 eR8Unorm = VK_FORMAT_R8_UNORM,
5196 eR8Snorm = VK_FORMAT_R8_SNORM,
5197 eR8Uscaled = VK_FORMAT_R8_USCALED,
5198 eR8Sscaled = VK_FORMAT_R8_SSCALED,
5199 eR8Uint = VK_FORMAT_R8_UINT,
5200 eR8Sint = VK_FORMAT_R8_SINT,
5201 eR8Srgb = VK_FORMAT_R8_SRGB,
5202 eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
5203 eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
5204 eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
5205 eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
5206 eR8G8Uint = VK_FORMAT_R8G8_UINT,
5207 eR8G8Sint = VK_FORMAT_R8G8_SINT,
5208 eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
5209 eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
5210 eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
5211 eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
5212 eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
5213 eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
5214 eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
5215 eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
5216 eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
5217 eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
5218 eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
5219 eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
5220 eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
5221 eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
5222 eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
5223 eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
5224 eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
5225 eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
5226 eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
5227 eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
5228 eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
5229 eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
5230 eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
5231 eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
5232 eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
5233 eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
5234 eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
5235 eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
5236 eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
5237 eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
5238 eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
5239 eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
5240 eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
5241 eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
5242 eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
5243 eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
5244 eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
5245 eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
5246 eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
5247 eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
5248 eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
5249 eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
5250 eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
5251 eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
5252 eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
5253 eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
5254 eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
5255 eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
5256 eR16Unorm = VK_FORMAT_R16_UNORM,
5257 eR16Snorm = VK_FORMAT_R16_SNORM,
5258 eR16Uscaled = VK_FORMAT_R16_USCALED,
5259 eR16Sscaled = VK_FORMAT_R16_SSCALED,
5260 eR16Uint = VK_FORMAT_R16_UINT,
5261 eR16Sint = VK_FORMAT_R16_SINT,
5262 eR16Sfloat = VK_FORMAT_R16_SFLOAT,
5263 eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
5264 eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
5265 eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
5266 eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
5267 eR16G16Uint = VK_FORMAT_R16G16_UINT,
5268 eR16G16Sint = VK_FORMAT_R16G16_SINT,
5269 eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
5270 eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
5271 eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
5272 eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
5273 eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
5274 eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
5275 eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
5276 eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
5277 eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
5278 eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
5279 eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
5280 eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
5281 eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
5282 eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
5283 eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
5284 eR32Uint = VK_FORMAT_R32_UINT,
5285 eR32Sint = VK_FORMAT_R32_SINT,
5286 eR32Sfloat = VK_FORMAT_R32_SFLOAT,
5287 eR32G32Uint = VK_FORMAT_R32G32_UINT,
5288 eR32G32Sint = VK_FORMAT_R32G32_SINT,
5289 eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
5290 eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
5291 eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
5292 eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
5293 eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
5294 eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
5295 eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
5296 eR64Uint = VK_FORMAT_R64_UINT,
5297 eR64Sint = VK_FORMAT_R64_SINT,
5298 eR64Sfloat = VK_FORMAT_R64_SFLOAT,
5299 eR64G64Uint = VK_FORMAT_R64G64_UINT,
5300 eR64G64Sint = VK_FORMAT_R64G64_SINT,
5301 eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
5302 eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
5303 eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
5304 eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
5305 eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
5306 eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
5307 eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
5308 eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
5309 eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
5310 eD16Unorm = VK_FORMAT_D16_UNORM,
5311 eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
5312 eD32Sfloat = VK_FORMAT_D32_SFLOAT,
5313 eS8Uint = VK_FORMAT_S8_UINT,
5314 eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
5315 eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
5316 eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
5317 eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
5318 eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
5319 eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
5320 eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
5321 eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
5322 eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
5323 eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
5324 eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
5325 eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
5326 eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
5327 eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
5328 eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
5329 eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
5330 eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
5331 eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
5332 eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
5333 eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
5334 eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
5335 eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
5336 eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
5337 eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
5338 eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
5339 eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
5340 eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
5341 eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
5342 eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
5343 eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
5344 eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
5345 eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
5346 eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
5347 eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
5348 eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
5349 eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
5350 eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
5351 eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
5352 eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
5353 eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
5354 eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
5355 eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
5356 eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
5357 eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
5358 eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
5359 eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
5360 eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
5361 eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
5362 eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
5363 eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
5364 eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
5365 eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
5366 eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
5367 eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
5368 eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
5369 eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
5370 eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
5371 ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
5372 ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
5373 ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
5374 ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
5375 ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
5376 ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
5377 ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
5378 ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG
5379 };
5380
5381 struct VertexInputAttributeDescription
5382 {
5383 VertexInputAttributeDescription( uint32_t location_ = 0, uint32_t binding_ = 0, Format format_ = Format::eUndefined, uint32_t offset_ = 0 )
5384 : location( location_ )
5385 , binding( binding_ )
5386 , format( format_ )
5387 , offset( offset_ )
5388 {
5389 }
5390
5391 VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs )
5392 {
5393 memcpy( this, &rhs, sizeof(VertexInputAttributeDescription) );
5394 }
5395
5396 VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs )
5397 {
5398 memcpy( this, &rhs, sizeof(VertexInputAttributeDescription) );
5399 return *this;
5400 }
5401
5402 VertexInputAttributeDescription& setLocation( uint32_t location_ )
5403 {
5404 location = location_;
5405 return *this;
5406 }
5407
5408 VertexInputAttributeDescription& setBinding( uint32_t binding_ )
5409 {
5410 binding = binding_;
5411 return *this;
5412 }
5413
5414 VertexInputAttributeDescription& setFormat( Format format_ )
5415 {
5416 format = format_;
5417 return *this;
5418 }
5419
5420 VertexInputAttributeDescription& setOffset( uint32_t offset_ )
5421 {
5422 offset = offset_;
5423 return *this;
5424 }
5425
5426 operator const VkVertexInputAttributeDescription&() const
5427 {
5428 return *reinterpret_cast<const VkVertexInputAttributeDescription*>(this);
5429 }
5430
5431 bool operator==( VertexInputAttributeDescription const& rhs ) const
5432 {
5433 return ( location == rhs.location )
5434 && ( binding == rhs.binding )
5435 && ( format == rhs.format )
5436 && ( offset == rhs.offset );
5437 }
5438
5439 bool operator!=( VertexInputAttributeDescription const& rhs ) const
5440 {
5441 return !operator==( rhs );
5442 }
5443
5444 uint32_t location;
5445 uint32_t binding;
5446 Format format;
5447 uint32_t offset;
5448 };
5449 static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
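  // Usage sketch (illustrative): a three-component float position attribute read from binding 0 of
  // the assumed Vertex struct above; the "pos" member name is hypothetical.
  //
  //   vk::VertexInputAttributeDescription positionAttr( 0 /*location*/, 0 /*binding*/,
  //                                                     vk::Format::eR32G32B32Sfloat, offsetof( Vertex, pos ) );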
5450
5451 enum class StructureType
5452 {
5453 eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
5454 eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
5455 eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
5456 eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
5457 eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
5458 eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
5459 eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
5460 eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
5461 eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
5462 eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
5463 eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
5464 eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
5465 eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
5466 eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
5467 eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
5468 eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
5469 eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
5470 ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
5471 ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
5472 ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
5473 ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
5474 ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
5475 ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
5476 ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
5477 ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
5478 ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
5479 ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
5480 ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
5481 eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
5482 eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
5483 ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
5484 eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
5485 eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
5486 eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5487 eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5488 eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5489 eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
5490 eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
5491 eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
5492 eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
5493 eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
5494 eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
5495 eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
5496 eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
5497 eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
5498 eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
5499 eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
5500 eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
5501 eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
5502 eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
5503 ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
5504 eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
5505 eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
5506 eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
5507 eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
5508 eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
5509 eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
5510 eMirSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR,
5511 eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
5512 eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
5513 eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
5514 ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
5515 eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
5516 eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
5517 eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
5518 eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
5519 eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
5520 eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
5521 eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
5522 eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
5523 eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
5524 eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
5525 eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
5526 eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
5527 eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
5528 eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
5529 eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
5530 eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
5531 eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
5532 eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX
5533 };
5534
5535 struct ApplicationInfo
5536 {
5537 ApplicationInfo( const char* pApplicationName_ = nullptr, uint32_t applicationVersion_ = 0, const char* pEngineName_ = nullptr, uint32_t engineVersion_ = 0, uint32_t apiVersion_ = 0 )
5538 : sType( StructureType::eApplicationInfo )
5539 , pNext( nullptr )
5540 , pApplicationName( pApplicationName_ )
5541 , applicationVersion( applicationVersion_ )
5542 , pEngineName( pEngineName_ )
5543 , engineVersion( engineVersion_ )
5544 , apiVersion( apiVersion_ )
5545 {
5546 }
5547
5548 ApplicationInfo( VkApplicationInfo const & rhs )
5549 {
5550 memcpy( this, &rhs, sizeof(ApplicationInfo) );
5551 }
5552
5553 ApplicationInfo& operator=( VkApplicationInfo const & rhs )
5554 {
5555 memcpy( this, &rhs, sizeof(ApplicationInfo) );
5556 return *this;
5557 }
5558
5559 ApplicationInfo& setSType( StructureType sType_ )
5560 {
5561 sType = sType_;
5562 return *this;
5563 }
5564
5565 ApplicationInfo& setPNext( const void* pNext_ )
5566 {
5567 pNext = pNext_;
5568 return *this;
5569 }
5570
5571 ApplicationInfo& setPApplicationName( const char* pApplicationName_ )
5572 {
5573 pApplicationName = pApplicationName_;
5574 return *this;
5575 }
5576
5577 ApplicationInfo& setApplicationVersion( uint32_t applicationVersion_ )
5578 {
5579 applicationVersion = applicationVersion_;
5580 return *this;
5581 }
5582
5583 ApplicationInfo& setPEngineName( const char* pEngineName_ )
5584 {
5585 pEngineName = pEngineName_;
5586 return *this;
5587 }
5588
5589 ApplicationInfo& setEngineVersion( uint32_t engineVersion_ )
5590 {
5591 engineVersion = engineVersion_;
5592 return *this;
5593 }
5594
5595 ApplicationInfo& setApiVersion( uint32_t apiVersion_ )
5596 {
5597 apiVersion = apiVersion_;
5598 return *this;
5599 }
5600
5601 operator const VkApplicationInfo&() const
5602 {
5603 return *reinterpret_cast<const VkApplicationInfo*>(this);
5604 }
5605
5606 bool operator==( ApplicationInfo const& rhs ) const
5607 {
5608 return ( sType == rhs.sType )
5609 && ( pNext == rhs.pNext )
5610 && ( pApplicationName == rhs.pApplicationName )
5611 && ( applicationVersion == rhs.applicationVersion )
5612 && ( pEngineName == rhs.pEngineName )
5613 && ( engineVersion == rhs.engineVersion )
5614 && ( apiVersion == rhs.apiVersion );
5615 }
5616
5617 bool operator!=( ApplicationInfo const& rhs ) const
5618 {
5619 return !operator==( rhs );
5620 }
5621
5622 private:
5623 StructureType sType;
5624
5625 public:
5626 const void* pNext;
5627 const char* pApplicationName;
5628 uint32_t applicationVersion;
5629 const char* pEngineName;
5630 uint32_t engineVersion;
5631 uint32_t apiVersion;
5632 };
5633 static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
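// Editor-added illustrative sketch (not part of the generated API): the fluent setters
// return *this, so an ApplicationInfo can be filled in one chained expression. The
// application/engine names and version numbers below are placeholder values.
//
//   vk::ApplicationInfo appInfo = vk::ApplicationInfo()
//     .setPApplicationName( "MyApp" )
//     .setApplicationVersion( 1 )
//     .setPEngineName( "MyEngine" )
//     .setEngineVersion( 1 )
//     .setApiVersion( VK_API_VERSION_1_0 );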
5634
5635 struct DeviceQueueCreateInfo
5636 {
5637 DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), uint32_t queueFamilyIndex_ = 0, uint32_t queueCount_ = 0, const float* pQueuePriorities_ = nullptr )
5638 : sType( StructureType::eDeviceQueueCreateInfo )
5639 , pNext( nullptr )
5640 , flags( flags_ )
5641 , queueFamilyIndex( queueFamilyIndex_ )
5642 , queueCount( queueCount_ )
5643 , pQueuePriorities( pQueuePriorities_ )
5644 {
5645 }
5646
5647 DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs )
5648 {
5649 memcpy( this, &rhs, sizeof(DeviceQueueCreateInfo) );
5650 }
5651
5652 DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs )
5653 {
5654 memcpy( this, &rhs, sizeof(DeviceQueueCreateInfo) );
5655 return *this;
5656 }
5657
5658 DeviceQueueCreateInfo& setSType( StructureType sType_ )
5659 {
5660 sType = sType_;
5661 return *this;
5662 }
5663
5664 DeviceQueueCreateInfo& setPNext( const void* pNext_ )
5665 {
5666 pNext = pNext_;
5667 return *this;
5668 }
5669
5670 DeviceQueueCreateInfo& setFlags( DeviceQueueCreateFlags flags_ )
5671 {
5672 flags = flags_;
5673 return *this;
5674 }
5675
5676 DeviceQueueCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
5677 {
5678 queueFamilyIndex = queueFamilyIndex_;
5679 return *this;
5680 }
5681
5682 DeviceQueueCreateInfo& setQueueCount( uint32_t queueCount_ )
5683 {
5684 queueCount = queueCount_;
5685 return *this;
5686 }
5687
5688 DeviceQueueCreateInfo& setPQueuePriorities( const float* pQueuePriorities_ )
5689 {
5690 pQueuePriorities = pQueuePriorities_;
5691 return *this;
5692 }
5693
5694 operator const VkDeviceQueueCreateInfo&() const
5695 {
5696 return *reinterpret_cast<const VkDeviceQueueCreateInfo*>(this);
5697 }
5698
5699 bool operator==( DeviceQueueCreateInfo const& rhs ) const
5700 {
5701 return ( sType == rhs.sType )
5702 && ( pNext == rhs.pNext )
5703 && ( flags == rhs.flags )
5704 && ( queueFamilyIndex == rhs.queueFamilyIndex )
5705 && ( queueCount == rhs.queueCount )
5706 && ( pQueuePriorities == rhs.pQueuePriorities );
5707 }
5708
5709 bool operator!=( DeviceQueueCreateInfo const& rhs ) const
5710 {
5711 return !operator==( rhs );
5712 }
5713
5714 private:
5715 StructureType sType;
5716
5717 public:
5718 const void* pNext;
5719 DeviceQueueCreateFlags flags;
5720 uint32_t queueFamilyIndex;
5721 uint32_t queueCount;
5722 const float* pQueuePriorities;
5723 };
5724 static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
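// Editor-added illustrative sketch: pQueuePriorities must point at queueCount floats that
// remain valid until the structure is consumed; graphicsQueueFamilyIndex is a placeholder
// for an index obtained from the physical device's queue family properties.
//
//   float queuePriority = 1.0f;
//   vk::DeviceQueueCreateInfo queueInfo = vk::DeviceQueueCreateInfo()
//     .setQueueFamilyIndex( graphicsQueueFamilyIndex )
//     .setQueueCount( 1 )
//     .setPQueuePriorities( &queuePriority );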
5725
5726 struct DeviceCreateInfo
5727 {
5728 DeviceCreateInfo( DeviceCreateFlags flags_ = DeviceCreateFlags(), uint32_t queueCreateInfoCount_ = 0, const DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr, const PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr )
5729 : sType( StructureType::eDeviceCreateInfo )
5730 , pNext( nullptr )
5731 , flags( flags_ )
5732 , queueCreateInfoCount( queueCreateInfoCount_ )
5733 , pQueueCreateInfos( pQueueCreateInfos_ )
5734 , enabledLayerCount( enabledLayerCount_ )
5735 , ppEnabledLayerNames( ppEnabledLayerNames_ )
5736 , enabledExtensionCount( enabledExtensionCount_ )
5737 , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
5738 , pEnabledFeatures( pEnabledFeatures_ )
5739 {
5740 }
5741
5742 DeviceCreateInfo( VkDeviceCreateInfo const & rhs )
5743 {
5744 memcpy( this, &rhs, sizeof(DeviceCreateInfo) );
5745 }
5746
5747 DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs )
5748 {
5749 memcpy( this, &rhs, sizeof(DeviceCreateInfo) );
5750 return *this;
5751 }
5752
5753 DeviceCreateInfo& setSType( StructureType sType_ )
5754 {
5755 sType = sType_;
5756 return *this;
5757 }
5758
5759 DeviceCreateInfo& setPNext( const void* pNext_ )
5760 {
5761 pNext = pNext_;
5762 return *this;
5763 }
5764
5765 DeviceCreateInfo& setFlags( DeviceCreateFlags flags_ )
5766 {
5767 flags = flags_;
5768 return *this;
5769 }
5770
5771 DeviceCreateInfo& setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ )
5772 {
5773 queueCreateInfoCount = queueCreateInfoCount_;
5774 return *this;
5775 }
5776
5777 DeviceCreateInfo& setPQueueCreateInfos( const DeviceQueueCreateInfo* pQueueCreateInfos_ )
5778 {
5779 pQueueCreateInfos = pQueueCreateInfos_;
5780 return *this;
5781 }
5782
5783 DeviceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
5784 {
5785 enabledLayerCount = enabledLayerCount_;
5786 return *this;
5787 }
5788
5789 DeviceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
5790 {
5791 ppEnabledLayerNames = ppEnabledLayerNames_;
5792 return *this;
5793 }
5794
5795 DeviceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
5796 {
5797 enabledExtensionCount = enabledExtensionCount_;
5798 return *this;
5799 }
5800
5801 DeviceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
5802 {
5803 ppEnabledExtensionNames = ppEnabledExtensionNames_;
5804 return *this;
5805 }
5806
5807 DeviceCreateInfo& setPEnabledFeatures( const PhysicalDeviceFeatures* pEnabledFeatures_ )
5808 {
5809 pEnabledFeatures = pEnabledFeatures_;
5810 return *this;
5811 }
5812
5813 operator const VkDeviceCreateInfo&() const
5814 {
5815 return *reinterpret_cast<const VkDeviceCreateInfo*>(this);
5816 }
5817
5818 bool operator==( DeviceCreateInfo const& rhs ) const
5819 {
5820 return ( sType == rhs.sType )
5821 && ( pNext == rhs.pNext )
5822 && ( flags == rhs.flags )
5823 && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
5824 && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
5825 && ( enabledLayerCount == rhs.enabledLayerCount )
5826 && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
5827 && ( enabledExtensionCount == rhs.enabledExtensionCount )
5828 && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
5829 && ( pEnabledFeatures == rhs.pEnabledFeatures );
5830 }
5831
5832 bool operator!=( DeviceCreateInfo const& rhs ) const
5833 {
5834 return !operator==( rhs );
5835 }
5836
5837 private:
5838 StructureType sType;
5839
5840 public:
5841 const void* pNext;
5842 DeviceCreateFlags flags;
5843 uint32_t queueCreateInfoCount;
5844 const DeviceQueueCreateInfo* pQueueCreateInfos;
5845 uint32_t enabledLayerCount;
5846 const char* const* ppEnabledLayerNames;
5847 uint32_t enabledExtensionCount;
5848 const char* const* ppEnabledExtensionNames;
5849 const PhysicalDeviceFeatures* pEnabledFeatures;
5850 };
5851 static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
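// Editor-added illustrative sketch: a DeviceCreateInfo referencing a single
// DeviceQueueCreateInfo such as the queueInfo sketched above; the extension list and
// physicalDevice handle are placeholders, and createDevice throws on failure when
// exceptions are enabled in enhanced mode.
//
//   const char* deviceExtensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
//   vk::DeviceCreateInfo deviceInfo = vk::DeviceCreateInfo()
//     .setQueueCreateInfoCount( 1 )
//     .setPQueueCreateInfos( &queueInfo )
//     .setEnabledExtensionCount( 1 )
//     .setPpEnabledExtensionNames( deviceExtensions );
//   vk::Device device = physicalDevice.createDevice( deviceInfo );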
5852
5853 struct InstanceCreateInfo
5854 {
5855 InstanceCreateInfo( InstanceCreateFlags flags_ = InstanceCreateFlags(), const ApplicationInfo* pApplicationInfo_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr )
5856 : sType( StructureType::eInstanceCreateInfo )
5857 , pNext( nullptr )
5858 , flags( flags_ )
5859 , pApplicationInfo( pApplicationInfo_ )
5860 , enabledLayerCount( enabledLayerCount_ )
5861 , ppEnabledLayerNames( ppEnabledLayerNames_ )
5862 , enabledExtensionCount( enabledExtensionCount_ )
5863 , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
5864 {
5865 }
5866
5867 InstanceCreateInfo( VkInstanceCreateInfo const & rhs )
5868 {
5869 memcpy( this, &rhs, sizeof(InstanceCreateInfo) );
5870 }
5871
5872 InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs )
5873 {
5874 memcpy( this, &rhs, sizeof(InstanceCreateInfo) );
5875 return *this;
5876 }
5877
5878 InstanceCreateInfo& setSType( StructureType sType_ )
5879 {
5880 sType = sType_;
5881 return *this;
5882 }
5883
5884 InstanceCreateInfo& setPNext( const void* pNext_ )
5885 {
5886 pNext = pNext_;
5887 return *this;
5888 }
5889
5890 InstanceCreateInfo& setFlags( InstanceCreateFlags flags_ )
5891 {
5892 flags = flags_;
5893 return *this;
5894 }
5895
5896 InstanceCreateInfo& setPApplicationInfo( const ApplicationInfo* pApplicationInfo_ )
5897 {
5898 pApplicationInfo = pApplicationInfo_;
5899 return *this;
5900 }
5901
5902 InstanceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
5903 {
5904 enabledLayerCount = enabledLayerCount_;
5905 return *this;
5906 }
5907
5908 InstanceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
5909 {
5910 ppEnabledLayerNames = ppEnabledLayerNames_;
5911 return *this;
5912 }
5913
5914 InstanceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
5915 {
5916 enabledExtensionCount = enabledExtensionCount_;
5917 return *this;
5918 }
5919
5920 InstanceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
5921 {
5922 ppEnabledExtensionNames = ppEnabledExtensionNames_;
5923 return *this;
5924 }
5925
5926 operator const VkInstanceCreateInfo&() const
5927 {
5928 return *reinterpret_cast<const VkInstanceCreateInfo*>(this);
5929 }
5930
5931 bool operator==( InstanceCreateInfo const& rhs ) const
5932 {
5933 return ( sType == rhs.sType )
5934 && ( pNext == rhs.pNext )
5935 && ( flags == rhs.flags )
5936 && ( pApplicationInfo == rhs.pApplicationInfo )
5937 && ( enabledLayerCount == rhs.enabledLayerCount )
5938 && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
5939 && ( enabledExtensionCount == rhs.enabledExtensionCount )
5940 && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
5941 }
5942
5943 bool operator!=( InstanceCreateInfo const& rhs ) const
5944 {
5945 return !operator==( rhs );
5946 }
5947
5948 private:
5949 StructureType sType;
5950
5951 public:
5952 const void* pNext;
5953 InstanceCreateFlags flags;
5954 const ApplicationInfo* pApplicationInfo;
5955 uint32_t enabledLayerCount;
5956 const char* const* ppEnabledLayerNames;
5957 uint32_t enabledExtensionCount;
5958 const char* const* ppEnabledExtensionNames;
5959 };
5960 static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
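// Editor-added illustrative sketch: wiring an ApplicationInfo (appInfo, as sketched
// above) into instance creation; layer and extension lists are omitted here and the
// enhanced-mode free function is assumed.
//
//   vk::InstanceCreateInfo instanceInfo = vk::InstanceCreateInfo()
//     .setPApplicationInfo( &appInfo );
//   vk::Instance instance = vk::createInstance( instanceInfo );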
5961
5962 struct MemoryAllocateInfo
5963 {
5964 MemoryAllocateInfo( DeviceSize allocationSize_ = 0, uint32_t memoryTypeIndex_ = 0 )
5965 : sType( StructureType::eMemoryAllocateInfo )
5966 , pNext( nullptr )
5967 , allocationSize( allocationSize_ )
5968 , memoryTypeIndex( memoryTypeIndex_ )
5969 {
5970 }
5971
5972 MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs )
5973 {
5974 memcpy( this, &rhs, sizeof(MemoryAllocateInfo) );
5975 }
5976
5977 MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs )
5978 {
5979 memcpy( this, &rhs, sizeof(MemoryAllocateInfo) );
5980 return *this;
5981 }
5982
5983 MemoryAllocateInfo& setSType( StructureType sType_ )
5984 {
5985 sType = sType_;
5986 return *this;
5987 }
5988
5989 MemoryAllocateInfo& setPNext( const void* pNext_ )
5990 {
5991 pNext = pNext_;
5992 return *this;
5993 }
5994
5995 MemoryAllocateInfo& setAllocationSize( DeviceSize allocationSize_ )
5996 {
5997 allocationSize = allocationSize_;
5998 return *this;
5999 }
6000
6001 MemoryAllocateInfo& setMemoryTypeIndex( uint32_t memoryTypeIndex_ )
6002 {
6003 memoryTypeIndex = memoryTypeIndex_;
6004 return *this;
6005 }
6006
6007 operator const VkMemoryAllocateInfo&() const
6008 {
6009 return *reinterpret_cast<const VkMemoryAllocateInfo*>(this);
6010 }
6011
6012 bool operator==( MemoryAllocateInfo const& rhs ) const
6013 {
6014 return ( sType == rhs.sType )
6015 && ( pNext == rhs.pNext )
6016 && ( allocationSize == rhs.allocationSize )
6017 && ( memoryTypeIndex == rhs.memoryTypeIndex );
6018 }
6019
6020 bool operator!=( MemoryAllocateInfo const& rhs ) const
6021 {
6022 return !operator==( rhs );
6023 }
6024
6025 private:
6026 StructureType sType;
6027
6028 public:
6029 const void* pNext;
6030 DeviceSize allocationSize;
6031 uint32_t memoryTypeIndex;
6032 };
6033 static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
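// Editor-added illustrative sketch: allocationSize and memoryTypeIndex normally come from
// getBufferMemoryRequirements/getImageMemoryRequirements plus a search through the
// physical device memory properties; memRequirements and chosenMemoryTypeIndex are
// placeholders.
//
//   vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
//     .setAllocationSize( memRequirements.size )
//     .setMemoryTypeIndex( chosenMemoryTypeIndex );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );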
6034
6035 struct MappedMemoryRange
6036 {
6037 MappedMemoryRange( DeviceMemory memory_ = DeviceMemory(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
6038 : sType( StructureType::eMappedMemoryRange )
6039 , pNext( nullptr )
6040 , memory( memory_ )
6041 , offset( offset_ )
6042 , size( size_ )
6043 {
6044 }
6045
6046 MappedMemoryRange( VkMappedMemoryRange const & rhs )
6047 {
6048 memcpy( this, &rhs, sizeof(MappedMemoryRange) );
6049 }
6050
6051 MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs )
6052 {
6053 memcpy( this, &rhs, sizeof(MappedMemoryRange) );
6054 return *this;
6055 }
6056
6057 MappedMemoryRange& setSType( StructureType sType_ )
6058 {
6059 sType = sType_;
6060 return *this;
6061 }
6062
6063 MappedMemoryRange& setPNext( const void* pNext_ )
6064 {
6065 pNext = pNext_;
6066 return *this;
6067 }
6068
6069 MappedMemoryRange& setMemory( DeviceMemory memory_ )
6070 {
6071 memory = memory_;
6072 return *this;
6073 }
6074
6075 MappedMemoryRange& setOffset( DeviceSize offset_ )
6076 {
6077 offset = offset_;
6078 return *this;
6079 }
6080
6081 MappedMemoryRange& setSize( DeviceSize size_ )
6082 {
6083 size = size_;
6084 return *this;
6085 }
6086
6087 operator const VkMappedMemoryRange&() const
6088 {
6089 return *reinterpret_cast<const VkMappedMemoryRange*>(this);
6090 }
6091
6092 bool operator==( MappedMemoryRange const& rhs ) const
6093 {
6094 return ( sType == rhs.sType )
6095 && ( pNext == rhs.pNext )
6096 && ( memory == rhs.memory )
6097 && ( offset == rhs.offset )
6098 && ( size == rhs.size );
6099 }
6100
6101 bool operator!=( MappedMemoryRange const& rhs ) const
6102 {
6103 return !operator==( rhs );
6104 }
6105
6106 private:
6107 StructureType sType;
6108
6109 public:
6110 const void* pNext;
6111 DeviceMemory memory;
6112 DeviceSize offset;
6113 DeviceSize size;
6114 };
6115 static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
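// Editor-added illustrative sketch: flushing a mapped range of host-visible,
// non-coherent memory; VK_WHOLE_SIZE covers everything from offset to the end of the
// allocation. The memory handle is a placeholder.
//
//   vk::MappedMemoryRange range = vk::MappedMemoryRange()
//     .setMemory( memory )
//     .setOffset( 0 )
//     .setSize( VK_WHOLE_SIZE );
//   device.flushMappedMemoryRanges( range );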
6116
6117 struct WriteDescriptorSet
6118 {
6119 WriteDescriptorSet( DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, const DescriptorImageInfo* pImageInfo_ = nullptr, const DescriptorBufferInfo* pBufferInfo_ = nullptr, const BufferView* pTexelBufferView_ = nullptr )
6120 : sType( StructureType::eWriteDescriptorSet )
6121 , pNext( nullptr )
6122 , dstSet( dstSet_ )
6123 , dstBinding( dstBinding_ )
6124 , dstArrayElement( dstArrayElement_ )
6125 , descriptorCount( descriptorCount_ )
6126 , descriptorType( descriptorType_ )
6127 , pImageInfo( pImageInfo_ )
6128 , pBufferInfo( pBufferInfo_ )
6129 , pTexelBufferView( pTexelBufferView_ )
6130 {
6131 }
6132
6133 WriteDescriptorSet( VkWriteDescriptorSet const & rhs )
6134 {
6135 memcpy( this, &rhs, sizeof(WriteDescriptorSet) );
6136 }
6137
6138 WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs )
6139 {
6140 memcpy( this, &rhs, sizeof(WriteDescriptorSet) );
6141 return *this;
6142 }
6143
6144 WriteDescriptorSet& setSType( StructureType sType_ )
6145 {
6146 sType = sType_;
6147 return *this;
6148 }
6149
6150 WriteDescriptorSet& setPNext( const void* pNext_ )
6151 {
6152 pNext = pNext_;
6153 return *this;
6154 }
6155
6156 WriteDescriptorSet& setDstSet( DescriptorSet dstSet_ )
6157 {
6158 dstSet = dstSet_;
6159 return *this;
6160 }
6161
6162 WriteDescriptorSet& setDstBinding( uint32_t dstBinding_ )
6163 {
6164 dstBinding = dstBinding_;
6165 return *this;
6166 }
6167
6168 WriteDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
6169 {
6170 dstArrayElement = dstArrayElement_;
6171 return *this;
6172 }
6173
6174 WriteDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
6175 {
6176 descriptorCount = descriptorCount_;
6177 return *this;
6178 }
6179
6180 WriteDescriptorSet& setDescriptorType( DescriptorType descriptorType_ )
6181 {
6182 descriptorType = descriptorType_;
6183 return *this;
6184 }
6185
6186 WriteDescriptorSet& setPImageInfo( const DescriptorImageInfo* pImageInfo_ )
6187 {
6188 pImageInfo = pImageInfo_;
6189 return *this;
6190 }
6191
6192 WriteDescriptorSet& setPBufferInfo( const DescriptorBufferInfo* pBufferInfo_ )
6193 {
6194 pBufferInfo = pBufferInfo_;
6195 return *this;
6196 }
6197
6198 WriteDescriptorSet& setPTexelBufferView( const BufferView* pTexelBufferView_ )
6199 {
6200 pTexelBufferView = pTexelBufferView_;
6201 return *this;
6202 }
6203
6204 operator const VkWriteDescriptorSet&() const
6205 {
6206 return *reinterpret_cast<const VkWriteDescriptorSet*>(this);
6207 }
6208
6209 bool operator==( WriteDescriptorSet const& rhs ) const
6210 {
6211 return ( sType == rhs.sType )
6212 && ( pNext == rhs.pNext )
6213 && ( dstSet == rhs.dstSet )
6214 && ( dstBinding == rhs.dstBinding )
6215 && ( dstArrayElement == rhs.dstArrayElement )
6216 && ( descriptorCount == rhs.descriptorCount )
6217 && ( descriptorType == rhs.descriptorType )
6218 && ( pImageInfo == rhs.pImageInfo )
6219 && ( pBufferInfo == rhs.pBufferInfo )
6220 && ( pTexelBufferView == rhs.pTexelBufferView );
6221 }
6222
6223 bool operator!=( WriteDescriptorSet const& rhs ) const
6224 {
6225 return !operator==( rhs );
6226 }
6227
6228 private:
6229 StructureType sType;
6230
6231 public:
6232 const void* pNext;
6233 DescriptorSet dstSet;
6234 uint32_t dstBinding;
6235 uint32_t dstArrayElement;
6236 uint32_t descriptorCount;
6237 DescriptorType descriptorType;
6238 const DescriptorImageInfo* pImageInfo;
6239 const DescriptorBufferInfo* pBufferInfo;
6240 const BufferView* pTexelBufferView;
6241 };
6242 static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
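// Editor-added illustrative sketch: updating one uniform-buffer binding; only the pointer
// that matches descriptorType (pBufferInfo here) needs to be valid. buffer, descriptorSet
// and UniformData are placeholders.
//
//   vk::DescriptorBufferInfo bufferInfo( buffer, 0, sizeof( UniformData ) );
//   vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
//     .setDstSet( descriptorSet )
//     .setDstBinding( 0 )
//     .setDescriptorCount( 1 )
//     .setDescriptorType( vk::DescriptorType::eUniformBuffer )
//     .setPBufferInfo( &bufferInfo );
//   device.updateDescriptorSets( write, nullptr );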
6243
6244 struct CopyDescriptorSet
6245 {
6246 CopyDescriptorSet( DescriptorSet srcSet_ = DescriptorSet(), uint32_t srcBinding_ = 0, uint32_t srcArrayElement_ = 0, DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0 )
6247 : sType( StructureType::eCopyDescriptorSet )
6248 , pNext( nullptr )
6249 , srcSet( srcSet_ )
6250 , srcBinding( srcBinding_ )
6251 , srcArrayElement( srcArrayElement_ )
6252 , dstSet( dstSet_ )
6253 , dstBinding( dstBinding_ )
6254 , dstArrayElement( dstArrayElement_ )
6255 , descriptorCount( descriptorCount_ )
6256 {
6257 }
6258
6259 CopyDescriptorSet( VkCopyDescriptorSet const & rhs )
6260 {
6261 memcpy( this, &rhs, sizeof(CopyDescriptorSet) );
6262 }
6263
6264 CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs )
6265 {
6266 memcpy( this, &rhs, sizeof(CopyDescriptorSet) );
6267 return *this;
6268 }
6269
6270 CopyDescriptorSet& setSType( StructureType sType_ )
6271 {
6272 sType = sType_;
6273 return *this;
6274 }
6275
6276 CopyDescriptorSet& setPNext( const void* pNext_ )
6277 {
6278 pNext = pNext_;
6279 return *this;
6280 }
6281
6282 CopyDescriptorSet& setSrcSet( DescriptorSet srcSet_ )
6283 {
6284 srcSet = srcSet_;
6285 return *this;
6286 }
6287
6288 CopyDescriptorSet& setSrcBinding( uint32_t srcBinding_ )
6289 {
6290 srcBinding = srcBinding_;
6291 return *this;
6292 }
6293
6294 CopyDescriptorSet& setSrcArrayElement( uint32_t srcArrayElement_ )
6295 {
6296 srcArrayElement = srcArrayElement_;
6297 return *this;
6298 }
6299
6300 CopyDescriptorSet& setDstSet( DescriptorSet dstSet_ )
6301 {
6302 dstSet = dstSet_;
6303 return *this;
6304 }
6305
6306 CopyDescriptorSet& setDstBinding( uint32_t dstBinding_ )
6307 {
6308 dstBinding = dstBinding_;
6309 return *this;
6310 }
6311
6312 CopyDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
6313 {
6314 dstArrayElement = dstArrayElement_;
6315 return *this;
6316 }
6317
6318 CopyDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
6319 {
6320 descriptorCount = descriptorCount_;
6321 return *this;
6322 }
6323
6324 operator const VkCopyDescriptorSet&() const
6325 {
6326 return *reinterpret_cast<const VkCopyDescriptorSet*>(this);
6327 }
6328
6329 bool operator==( CopyDescriptorSet const& rhs ) const
6330 {
6331 return ( sType == rhs.sType )
6332 && ( pNext == rhs.pNext )
6333 && ( srcSet == rhs.srcSet )
6334 && ( srcBinding == rhs.srcBinding )
6335 && ( srcArrayElement == rhs.srcArrayElement )
6336 && ( dstSet == rhs.dstSet )
6337 && ( dstBinding == rhs.dstBinding )
6338 && ( dstArrayElement == rhs.dstArrayElement )
6339 && ( descriptorCount == rhs.descriptorCount );
6340 }
6341
6342 bool operator!=( CopyDescriptorSet const& rhs ) const
6343 {
6344 return !operator==( rhs );
6345 }
6346
6347 private:
6348 StructureType sType;
6349
6350 public:
6351 const void* pNext;
6352 DescriptorSet srcSet;
6353 uint32_t srcBinding;
6354 uint32_t srcArrayElement;
6355 DescriptorSet dstSet;
6356 uint32_t dstBinding;
6357 uint32_t dstArrayElement;
6358 uint32_t descriptorCount;
6359 };
6360 static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
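// Editor-added illustrative sketch: copying one descriptor from binding 0 of
// srcDescriptorSet to binding 0 of dstDescriptorSet (both placeholder handles).
//
//   vk::CopyDescriptorSet copy = vk::CopyDescriptorSet()
//     .setSrcSet( srcDescriptorSet )
//     .setSrcBinding( 0 )
//     .setDstSet( dstDescriptorSet )
//     .setDstBinding( 0 )
//     .setDescriptorCount( 1 );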
6361
6362 struct BufferViewCreateInfo
6363 {
6364 BufferViewCreateInfo( BufferViewCreateFlags flags_ = BufferViewCreateFlags(), Buffer buffer_ = Buffer(), Format format_ = Format::eUndefined, DeviceSize offset_ = 0, DeviceSize range_ = 0 )
6365 : sType( StructureType::eBufferViewCreateInfo )
6366 , pNext( nullptr )
6367 , flags( flags_ )
6368 , buffer( buffer_ )
6369 , format( format_ )
6370 , offset( offset_ )
6371 , range( range_ )
6372 {
6373 }
6374
6375 BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs )
6376 {
6377 memcpy( this, &rhs, sizeof(BufferViewCreateInfo) );
6378 }
6379
6380 BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs )
6381 {
6382 memcpy( this, &rhs, sizeof(BufferViewCreateInfo) );
6383 return *this;
6384 }
6385
6386 BufferViewCreateInfo& setSType( StructureType sType_ )
6387 {
6388 sType = sType_;
6389 return *this;
6390 }
6391
6392 BufferViewCreateInfo& setPNext( const void* pNext_ )
6393 {
6394 pNext = pNext_;
6395 return *this;
6396 }
6397
6398 BufferViewCreateInfo& setFlags( BufferViewCreateFlags flags_ )
6399 {
6400 flags = flags_;
6401 return *this;
6402 }
6403
6404 BufferViewCreateInfo& setBuffer( Buffer buffer_ )
6405 {
6406 buffer = buffer_;
6407 return *this;
6408 }
6409
6410 BufferViewCreateInfo& setFormat( Format format_ )
6411 {
6412 format = format_;
6413 return *this;
6414 }
6415
6416 BufferViewCreateInfo& setOffset( DeviceSize offset_ )
6417 {
6418 offset = offset_;
6419 return *this;
6420 }
6421
6422 BufferViewCreateInfo& setRange( DeviceSize range_ )
6423 {
6424 range = range_;
6425 return *this;
6426 }
6427
6428 operator const VkBufferViewCreateInfo&() const
6429 {
6430 return *reinterpret_cast<const VkBufferViewCreateInfo*>(this);
6431 }
6432
6433 bool operator==( BufferViewCreateInfo const& rhs ) const
6434 {
6435 return ( sType == rhs.sType )
6436 && ( pNext == rhs.pNext )
6437 && ( flags == rhs.flags )
6438 && ( buffer == rhs.buffer )
6439 && ( format == rhs.format )
6440 && ( offset == rhs.offset )
6441 && ( range == rhs.range );
6442 }
6443
6444 bool operator!=( BufferViewCreateInfo const& rhs ) const
6445 {
6446 return !operator==( rhs );
6447 }
6448
6449 private:
6450 StructureType sType;
6451
6452 public:
6453 const void* pNext;
6454 BufferViewCreateFlags flags;
6455 Buffer buffer;
6456 Format format;
6457 DeviceSize offset;
6458 DeviceSize range;
6459 };
6460 static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
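// Editor-added illustrative sketch: a view over an entire texel buffer; the buffer handle
// and format are placeholders and must match how the shader accesses the buffer.
//
//   vk::BufferViewCreateInfo viewInfo = vk::BufferViewCreateInfo()
//     .setBuffer( texelBuffer )
//     .setFormat( vk::Format::eR32G32B32A32Sfloat )
//     .setOffset( 0 )
//     .setRange( VK_WHOLE_SIZE );
//   vk::BufferView bufferView = device.createBufferView( viewInfo );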
6461
6462 struct ShaderModuleCreateInfo
6463 {
6464 ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_ = ShaderModuleCreateFlags(), size_t codeSize_ = 0, const uint32_t* pCode_ = nullptr )
6465 : sType( StructureType::eShaderModuleCreateInfo )
6466 , pNext( nullptr )
6467 , flags( flags_ )
6468 , codeSize( codeSize_ )
6469 , pCode( pCode_ )
6470 {
6471 }
6472
6473 ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs )
6474 {
6475 memcpy( this, &rhs, sizeof(ShaderModuleCreateInfo) );
6476 }
6477
6478 ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs )
6479 {
6480 memcpy( this, &rhs, sizeof(ShaderModuleCreateInfo) );
6481 return *this;
6482 }
6483
6484 ShaderModuleCreateInfo& setSType( StructureType sType_ )
6485 {
6486 sType = sType_;
6487 return *this;
6488 }
6489
6490 ShaderModuleCreateInfo& setPNext( const void* pNext_ )
6491 {
6492 pNext = pNext_;
6493 return *this;
6494 }
6495
6496 ShaderModuleCreateInfo& setFlags( ShaderModuleCreateFlags flags_ )
6497 {
6498 flags = flags_;
6499 return *this;
6500 }
6501
6502 ShaderModuleCreateInfo& setCodeSize( size_t codeSize_ )
6503 {
6504 codeSize = codeSize_;
6505 return *this;
6506 }
6507
6508 ShaderModuleCreateInfo& setPCode( const uint32_t* pCode_ )
6509 {
6510 pCode = pCode_;
6511 return *this;
6512 }
6513
6514 operator const VkShaderModuleCreateInfo&() const
6515 {
6516 return *reinterpret_cast<const VkShaderModuleCreateInfo*>(this);
6517 }
6518
6519 bool operator==( ShaderModuleCreateInfo const& rhs ) const
6520 {
6521 return ( sType == rhs.sType )
6522 && ( pNext == rhs.pNext )
6523 && ( flags == rhs.flags )
6524 && ( codeSize == rhs.codeSize )
6525 && ( pCode == rhs.pCode );
6526 }
6527
6528 bool operator!=( ShaderModuleCreateInfo const& rhs ) const
6529 {
6530 return !operator==( rhs );
6531 }
6532
6533 private:
6534 StructureType sType;
6535
6536 public:
6537 const void* pNext;
6538 ShaderModuleCreateFlags flags;
6539 size_t codeSize;
6540 const uint32_t* pCode;
6541 };
6542 static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
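// Editor-added illustrative sketch: note that codeSize is in bytes even though pCode is a
// uint32_t pointer; "spirv" stands for a placeholder std::vector<uint32_t> of SPIR-V words.
//
//   vk::ShaderModuleCreateInfo shaderInfo = vk::ShaderModuleCreateInfo()
//     .setCodeSize( spirv.size() * sizeof( uint32_t ) )
//     .setPCode( spirv.data() );
//   vk::ShaderModule shaderModule = device.createShaderModule( shaderInfo );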
6543
6544 struct DescriptorSetAllocateInfo
6545 {
6546 DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = DescriptorPool(), uint32_t descriptorSetCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr )
6547 : sType( StructureType::eDescriptorSetAllocateInfo )
6548 , pNext( nullptr )
6549 , descriptorPool( descriptorPool_ )
6550 , descriptorSetCount( descriptorSetCount_ )
6551 , pSetLayouts( pSetLayouts_ )
6552 {
6553 }
6554
6555 DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs )
6556 {
6557 memcpy( this, &rhs, sizeof(DescriptorSetAllocateInfo) );
6558 }
6559
6560 DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs )
6561 {
6562 memcpy( this, &rhs, sizeof(DescriptorSetAllocateInfo) );
6563 return *this;
6564 }
6565
6566 DescriptorSetAllocateInfo& setSType( StructureType sType_ )
6567 {
6568 sType = sType_;
6569 return *this;
6570 }
6571
6572 DescriptorSetAllocateInfo& setPNext( const void* pNext_ )
6573 {
6574 pNext = pNext_;
6575 return *this;
6576 }
6577
6578 DescriptorSetAllocateInfo& setDescriptorPool( DescriptorPool descriptorPool_ )
6579 {
6580 descriptorPool = descriptorPool_;
6581 return *this;
6582 }
6583
6584 DescriptorSetAllocateInfo& setDescriptorSetCount( uint32_t descriptorSetCount_ )
6585 {
6586 descriptorSetCount = descriptorSetCount_;
6587 return *this;
6588 }
6589
6590 DescriptorSetAllocateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
6591 {
6592 pSetLayouts = pSetLayouts_;
6593 return *this;
6594 }
6595
6596 operator const VkDescriptorSetAllocateInfo&() const
6597 {
6598 return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>(this);
6599 }
6600
6601 bool operator==( DescriptorSetAllocateInfo const& rhs ) const
6602 {
6603 return ( sType == rhs.sType )
6604 && ( pNext == rhs.pNext )
6605 && ( descriptorPool == rhs.descriptorPool )
6606 && ( descriptorSetCount == rhs.descriptorSetCount )
6607 && ( pSetLayouts == rhs.pSetLayouts );
6608 }
6609
6610 bool operator!=( DescriptorSetAllocateInfo const& rhs ) const
6611 {
6612 return !operator==( rhs );
6613 }
6614
6615 private:
6616 StructureType sType;
6617
6618 public:
6619 const void* pNext;
6620 DescriptorPool descriptorPool;
6621 uint32_t descriptorSetCount;
6622 const DescriptorSetLayout* pSetLayouts;
6623 };
6624 static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
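// Editor-added illustrative sketch: pSetLayouts must hold descriptorSetCount layout
// handles; descriptorPool and setLayout are placeholders.
//
//   vk::DescriptorSetAllocateInfo dsAllocInfo = vk::DescriptorSetAllocateInfo()
//     .setDescriptorPool( descriptorPool )
//     .setDescriptorSetCount( 1 )
//     .setPSetLayouts( &setLayout );
//   std::vector<vk::DescriptorSet> descriptorSets = device.allocateDescriptorSets( dsAllocInfo );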
6625
6626 struct PipelineVertexInputStateCreateInfo
6627 {
6628 PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = PipelineVertexInputStateCreateFlags(), uint32_t vertexBindingDescriptionCount_ = 0, const VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr, uint32_t vertexAttributeDescriptionCount_ = 0, const VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr )
6629 : sType( StructureType::ePipelineVertexInputStateCreateInfo )
6630 , pNext( nullptr )
6631 , flags( flags_ )
6632 , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
6633 , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
6634 , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
6635 , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
6636 {
6637 }
6638
6639 PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs )
6640 {
6641 memcpy( this, &rhs, sizeof(PipelineVertexInputStateCreateInfo) );
6642 }
6643
6644 PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs )
6645 {
6646 memcpy( this, &rhs, sizeof(PipelineVertexInputStateCreateInfo) );
6647 return *this;
6648 }
6649
6650 PipelineVertexInputStateCreateInfo& setSType( StructureType sType_ )
6651 {
6652 sType = sType_;
6653 return *this;
6654 }
6655
6656 PipelineVertexInputStateCreateInfo& setPNext( const void* pNext_ )
6657 {
6658 pNext = pNext_;
6659 return *this;
6660 }
6661
6662 PipelineVertexInputStateCreateInfo& setFlags( PipelineVertexInputStateCreateFlags flags_ )
6663 {
6664 flags = flags_;
6665 return *this;
6666 }
6667
6668 PipelineVertexInputStateCreateInfo& setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ )
6669 {
6670 vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
6671 return *this;
6672 }
6673
6674 PipelineVertexInputStateCreateInfo& setPVertexBindingDescriptions( const VertexInputBindingDescription* pVertexBindingDescriptions_ )
6675 {
6676 pVertexBindingDescriptions = pVertexBindingDescriptions_;
6677 return *this;
6678 }
6679
6680 PipelineVertexInputStateCreateInfo& setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ )
6681 {
6682 vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
6683 return *this;
6684 }
6685
6686 PipelineVertexInputStateCreateInfo& setPVertexAttributeDescriptions( const VertexInputAttributeDescription* pVertexAttributeDescriptions_ )
6687 {
6688 pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
6689 return *this;
6690 }
6691
6692 operator const VkPipelineVertexInputStateCreateInfo&() const
6693 {
6694 return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>(this);
6695 }
6696
6697 bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const
6698 {
6699 return ( sType == rhs.sType )
6700 && ( pNext == rhs.pNext )
6701 && ( flags == rhs.flags )
6702 && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
6703 && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
6704 && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
6705 && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
6706 }
6707
6708 bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const
6709 {
6710 return !operator==( rhs );
6711 }
6712
6713 private:
6714 StructureType sType;
6715
6716 public:
6717 const void* pNext;
6718 PipelineVertexInputStateCreateFlags flags;
6719 uint32_t vertexBindingDescriptionCount;
6720 const VertexInputBindingDescription* pVertexBindingDescriptions;
6721 uint32_t vertexAttributeDescriptionCount;
6722 const VertexInputAttributeDescription* pVertexAttributeDescriptions;
6723 };
6724 static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
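// Editor-added illustrative sketch: one interleaved vertex binding with two attributes;
// the Vertex struct and its members (pos, uv) are placeholders.
//
//   vk::VertexInputBindingDescription binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex );
//   vk::VertexInputAttributeDescription attrs[2] = {
//     { 0, 0, vk::Format::eR32G32B32Sfloat, offsetof( Vertex, pos ) },
//     { 1, 0, vk::Format::eR32G32Sfloat,    offsetof( Vertex, uv ) } };
//   vk::PipelineVertexInputStateCreateInfo vertexInput = vk::PipelineVertexInputStateCreateInfo()
//     .setVertexBindingDescriptionCount( 1 )
//     .setPVertexBindingDescriptions( &binding )
//     .setVertexAttributeDescriptionCount( 2 )
//     .setPVertexAttributeDescriptions( attrs );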
6725
6726 struct PipelineInputAssemblyStateCreateInfo
6727 {
6728 PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = PipelineInputAssemblyStateCreateFlags(), PrimitiveTopology topology_ = PrimitiveTopology::ePointList, Bool32 primitiveRestartEnable_ = 0 )
6729 : sType( StructureType::ePipelineInputAssemblyStateCreateInfo )
6730 , pNext( nullptr )
6731 , flags( flags_ )
6732 , topology( topology_ )
6733 , primitiveRestartEnable( primitiveRestartEnable_ )
6734 {
6735 }
6736
6737 PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs )
6738 {
6739 memcpy( this, &rhs, sizeof(PipelineInputAssemblyStateCreateInfo) );
6740 }
6741
6742 PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs )
6743 {
6744 memcpy( this, &rhs, sizeof(PipelineInputAssemblyStateCreateInfo) );
6745 return *this;
6746 }
6747
6748 PipelineInputAssemblyStateCreateInfo& setSType( StructureType sType_ )
6749 {
6750 sType = sType_;
6751 return *this;
6752 }
6753
6754 PipelineInputAssemblyStateCreateInfo& setPNext( const void* pNext_ )
6755 {
6756 pNext = pNext_;
6757 return *this;
6758 }
6759
6760 PipelineInputAssemblyStateCreateInfo& setFlags( PipelineInputAssemblyStateCreateFlags flags_ )
6761 {
6762 flags = flags_;
6763 return *this;
6764 }
6765
6766 PipelineInputAssemblyStateCreateInfo& setTopology( PrimitiveTopology topology_ )
6767 {
6768 topology = topology_;
6769 return *this;
6770 }
6771
6772 PipelineInputAssemblyStateCreateInfo& setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ )
6773 {
6774 primitiveRestartEnable = primitiveRestartEnable_;
6775 return *this;
6776 }
6777
6778 operator const VkPipelineInputAssemblyStateCreateInfo&() const
6779 {
6780 return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>(this);
6781 }
6782
6783 bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const
6784 {
6785 return ( sType == rhs.sType )
6786 && ( pNext == rhs.pNext )
6787 && ( flags == rhs.flags )
6788 && ( topology == rhs.topology )
6789 && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
6790 }
6791
6792 bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const
6793 {
6794 return !operator==( rhs );
6795 }
6796
6797 private:
6798 StructureType sType;
6799
6800 public:
6801 const void* pNext;
6802 PipelineInputAssemblyStateCreateFlags flags;
6803 PrimitiveTopology topology;
6804 Bool32 primitiveRestartEnable;
6805 };
6806 static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
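// Editor-added illustrative sketch: a plain triangle-list topology without primitive
// restart.
//
//   vk::PipelineInputAssemblyStateCreateInfo inputAssembly = vk::PipelineInputAssemblyStateCreateInfo()
//     .setTopology( vk::PrimitiveTopology::eTriangleList )
//     .setPrimitiveRestartEnable( VK_FALSE );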
6807
6808 struct PipelineTessellationStateCreateInfo
6809 {
6810 PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = PipelineTessellationStateCreateFlags(), uint32_t patchControlPoints_ = 0 )
6811 : sType( StructureType::ePipelineTessellationStateCreateInfo )
6812 , pNext( nullptr )
6813 , flags( flags_ )
6814 , patchControlPoints( patchControlPoints_ )
6815 {
6816 }
6817
6818 PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs )
6819 {
6820 memcpy( this, &rhs, sizeof(PipelineTessellationStateCreateInfo) );
6821 }
6822
6823 PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs )
6824 {
6825 memcpy( this, &rhs, sizeof(PipelineTessellationStateCreateInfo) );
6826 return *this;
6827 }
6828
6829 PipelineTessellationStateCreateInfo& setSType( StructureType sType_ )
6830 {
6831 sType = sType_;
6832 return *this;
6833 }
6834
6835 PipelineTessellationStateCreateInfo& setPNext( const void* pNext_ )
6836 {
6837 pNext = pNext_;
6838 return *this;
6839 }
6840
6841 PipelineTessellationStateCreateInfo& setFlags( PipelineTessellationStateCreateFlags flags_ )
6842 {
6843 flags = flags_;
6844 return *this;
6845 }
6846
6847 PipelineTessellationStateCreateInfo& setPatchControlPoints( uint32_t patchControlPoints_ )
6848 {
6849 patchControlPoints = patchControlPoints_;
6850 return *this;
6851 }
6852
6853 operator const VkPipelineTessellationStateCreateInfo&() const
6854 {
6855 return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>(this);
6856 }
6857
6858 bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const
6859 {
6860 return ( sType == rhs.sType )
6861 && ( pNext == rhs.pNext )
6862 && ( flags == rhs.flags )
6863 && ( patchControlPoints == rhs.patchControlPoints );
6864 }
6865
6866 bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const
6867 {
6868 return !operator==( rhs );
6869 }
6870
6871 private:
6872 StructureType sType;
6873
6874 public:
6875 const void* pNext;
6876 PipelineTessellationStateCreateFlags flags;
6877 uint32_t patchControlPoints;
6878 };
6879 static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
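// Editor-added illustrative sketch: only needed when the pipeline uses tessellation
// shaders; patchControlPoints is the number of control points per patch.
//
//   vk::PipelineTessellationStateCreateInfo tessState = vk::PipelineTessellationStateCreateInfo()
//     .setPatchControlPoints( 3 );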
6880
6881 struct PipelineViewportStateCreateInfo
6882 {
6883 PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = PipelineViewportStateCreateFlags(), uint32_t viewportCount_ = 0, const Viewport* pViewports_ = nullptr, uint32_t scissorCount_ = 0, const Rect2D* pScissors_ = nullptr )
6884 : sType( StructureType::ePipelineViewportStateCreateInfo )
6885 , pNext( nullptr )
6886 , flags( flags_ )
6887 , viewportCount( viewportCount_ )
6888 , pViewports( pViewports_ )
6889 , scissorCount( scissorCount_ )
6890 , pScissors( pScissors_ )
6891 {
6892 }
6893
6894 PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs )
6895 {
6896 memcpy( this, &rhs, sizeof(PipelineViewportStateCreateInfo) );
6897 }
6898
6899 PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs )
6900 {
6901 memcpy( this, &rhs, sizeof(PipelineViewportStateCreateInfo) );
6902 return *this;
6903 }
6904
6905 PipelineViewportStateCreateInfo& setSType( StructureType sType_ )
6906 {
6907 sType = sType_;
6908 return *this;
6909 }
6910
6911 PipelineViewportStateCreateInfo& setPNext( const void* pNext_ )
6912 {
6913 pNext = pNext_;
6914 return *this;
6915 }
6916
6917 PipelineViewportStateCreateInfo& setFlags( PipelineViewportStateCreateFlags flags_ )
6918 {
6919 flags = flags_;
6920 return *this;
6921 }
6922
6923 PipelineViewportStateCreateInfo& setViewportCount( uint32_t viewportCount_ )
6924 {
6925 viewportCount = viewportCount_;
6926 return *this;
6927 }
6928
6929 PipelineViewportStateCreateInfo& setPViewports( const Viewport* pViewports_ )
6930 {
6931 pViewports = pViewports_;
6932 return *this;
6933 }
6934
6935 PipelineViewportStateCreateInfo& setScissorCount( uint32_t scissorCount_ )
6936 {
6937 scissorCount = scissorCount_;
6938 return *this;
6939 }
6940
6941 PipelineViewportStateCreateInfo& setPScissors( const Rect2D* pScissors_ )
6942 {
6943 pScissors = pScissors_;
6944 return *this;
6945 }
6946
6947 operator const VkPipelineViewportStateCreateInfo&() const
6948 {
6949 return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>(this);
6950 }
6951
6952 bool operator==( PipelineViewportStateCreateInfo const& rhs ) const
6953 {
6954 return ( sType == rhs.sType )
6955 && ( pNext == rhs.pNext )
6956 && ( flags == rhs.flags )
6957 && ( viewportCount == rhs.viewportCount )
6958 && ( pViewports == rhs.pViewports )
6959 && ( scissorCount == rhs.scissorCount )
6960 && ( pScissors == rhs.pScissors );
6961 }
6962
6963 bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const
6964 {
6965 return !operator==( rhs );
6966 }
6967
6968 private:
6969 StructureType sType;
6970
6971 public:
6972 const void* pNext;
6973 PipelineViewportStateCreateFlags flags;
6974 uint32_t viewportCount;
6975 const Viewport* pViewports;
6976 uint32_t scissorCount;
6977 const Rect2D* pScissors;
6978 };
6979 static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
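// Editor-added illustrative sketch: one static viewport/scissor pair covering a
// placeholder swapchain extent; when viewport/scissor are dynamic state these pointers
// may be left null.
//
//   vk::Viewport viewport( 0.0f, 0.0f, (float)extent.width, (float)extent.height, 0.0f, 1.0f );
//   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), extent );
//   vk::PipelineViewportStateCreateInfo viewportState = vk::PipelineViewportStateCreateInfo()
//     .setViewportCount( 1 )
//     .setPViewports( &viewport )
//     .setScissorCount( 1 )
//     .setPScissors( &scissor );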
6980
6981 struct PipelineRasterizationStateCreateInfo
6982 {
6983 PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = PipelineRasterizationStateCreateFlags(), Bool32 depthClampEnable_ = 0, Bool32 rasterizerDiscardEnable_ = 0, PolygonMode polygonMode_ = PolygonMode::eFill, CullModeFlags cullMode_ = CullModeFlags(), FrontFace frontFace_ = FrontFace::eCounterClockwise, Bool32 depthBiasEnable_ = 0, float depthBiasConstantFactor_ = 0, float depthBiasClamp_ = 0, float depthBiasSlopeFactor_ = 0, float lineWidth_ = 0 )
6984 : sType( StructureType::ePipelineRasterizationStateCreateInfo )
6985 , pNext( nullptr )
6986 , flags( flags_ )
6987 , depthClampEnable( depthClampEnable_ )
6988 , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
6989 , polygonMode( polygonMode_ )
6990 , cullMode( cullMode_ )
6991 , frontFace( frontFace_ )
6992 , depthBiasEnable( depthBiasEnable_ )
6993 , depthBiasConstantFactor( depthBiasConstantFactor_ )
6994 , depthBiasClamp( depthBiasClamp_ )
6995 , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
6996 , lineWidth( lineWidth_ )
6997 {
6998 }
6999
7000 PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs )
7001 {
7002 memcpy( this, &rhs, sizeof(PipelineRasterizationStateCreateInfo) );
7003 }
7004
7005 PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs )
7006 {
7007 memcpy( this, &rhs, sizeof(PipelineRasterizationStateCreateInfo) );
7008 return *this;
7009 }
7010
7011 PipelineRasterizationStateCreateInfo& setSType( StructureType sType_ )
7012 {
7013 sType = sType_;
7014 return *this;
7015 }
7016
7017 PipelineRasterizationStateCreateInfo& setPNext( const void* pNext_ )
7018 {
7019 pNext = pNext_;
7020 return *this;
7021 }
7022
7023 PipelineRasterizationStateCreateInfo& setFlags( PipelineRasterizationStateCreateFlags flags_ )
7024 {
7025 flags = flags_;
7026 return *this;
7027 }
7028
7029 PipelineRasterizationStateCreateInfo& setDepthClampEnable( Bool32 depthClampEnable_ )
7030 {
7031 depthClampEnable = depthClampEnable_;
7032 return *this;
7033 }
7034
7035 PipelineRasterizationStateCreateInfo& setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ )
7036 {
7037 rasterizerDiscardEnable = rasterizerDiscardEnable_;
7038 return *this;
7039 }
7040
7041 PipelineRasterizationStateCreateInfo& setPolygonMode( PolygonMode polygonMode_ )
7042 {
7043 polygonMode = polygonMode_;
7044 return *this;
7045 }
7046
7047 PipelineRasterizationStateCreateInfo& setCullMode( CullModeFlags cullMode_ )
7048 {
7049 cullMode = cullMode_;
7050 return *this;
7051 }
7052
7053 PipelineRasterizationStateCreateInfo& setFrontFace( FrontFace frontFace_ )
7054 {
7055 frontFace = frontFace_;
7056 return *this;
7057 }
7058
7059 PipelineRasterizationStateCreateInfo& setDepthBiasEnable( Bool32 depthBiasEnable_ )
7060 {
7061 depthBiasEnable = depthBiasEnable_;
7062 return *this;
7063 }
7064
7065 PipelineRasterizationStateCreateInfo& setDepthBiasConstantFactor( float depthBiasConstantFactor_ )
7066 {
7067 depthBiasConstantFactor = depthBiasConstantFactor_;
7068 return *this;
7069 }
7070
7071 PipelineRasterizationStateCreateInfo& setDepthBiasClamp( float depthBiasClamp_ )
7072 {
7073 depthBiasClamp = depthBiasClamp_;
7074 return *this;
7075 }
7076
7077 PipelineRasterizationStateCreateInfo& setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ )
7078 {
7079 depthBiasSlopeFactor = depthBiasSlopeFactor_;
7080 return *this;
7081 }
7082
7083 PipelineRasterizationStateCreateInfo& setLineWidth( float lineWidth_ )
7084 {
7085 lineWidth = lineWidth_;
7086 return *this;
7087 }
7088
7089 operator const VkPipelineRasterizationStateCreateInfo&() const
7090 {
7091 return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>(this);
7092 }
7093
7094 bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const
7095 {
7096 return ( sType == rhs.sType )
7097 && ( pNext == rhs.pNext )
7098 && ( flags == rhs.flags )
7099 && ( depthClampEnable == rhs.depthClampEnable )
7100 && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
7101 && ( polygonMode == rhs.polygonMode )
7102 && ( cullMode == rhs.cullMode )
7103 && ( frontFace == rhs.frontFace )
7104 && ( depthBiasEnable == rhs.depthBiasEnable )
7105 && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
7106 && ( depthBiasClamp == rhs.depthBiasClamp )
7107 && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
7108 && ( lineWidth == rhs.lineWidth );
7109 }
7110
7111 bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const
7112 {
7113 return !operator==( rhs );
7114 }
7115
7116 private:
7117 StructureType sType;
7118
7119 public:
7120 const void* pNext;
7121 PipelineRasterizationStateCreateFlags flags;
7122 Bool32 depthClampEnable;
7123 Bool32 rasterizerDiscardEnable;
7124 PolygonMode polygonMode;
7125 CullModeFlags cullMode;
7126 FrontFace frontFace;
7127 Bool32 depthBiasEnable;
7128 float depthBiasConstantFactor;
7129 float depthBiasClamp;
7130 float depthBiasSlopeFactor;
7131 float lineWidth;
7132 };
7133 static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
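// Editor-added illustrative sketch: the default constructor leaves lineWidth at 0, so a
// typical fill-mode setup sets it to 1.0f explicitly.
//
//   vk::PipelineRasterizationStateCreateInfo rasterState = vk::PipelineRasterizationStateCreateInfo()
//     .setPolygonMode( vk::PolygonMode::eFill )
//     .setCullMode( vk::CullModeFlagBits::eBack )
//     .setFrontFace( vk::FrontFace::eCounterClockwise )
//     .setLineWidth( 1.0f );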
7134
7135 struct PipelineDepthStencilStateCreateInfo
7136 {
7137 PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = PipelineDepthStencilStateCreateFlags(), Bool32 depthTestEnable_ = 0, Bool32 depthWriteEnable_ = 0, CompareOp depthCompareOp_ = CompareOp::eNever, Bool32 depthBoundsTestEnable_ = 0, Bool32 stencilTestEnable_ = 0, StencilOpState front_ = StencilOpState(), StencilOpState back_ = StencilOpState(), float minDepthBounds_ = 0, float maxDepthBounds_ = 0 )
7138 : sType( StructureType::ePipelineDepthStencilStateCreateInfo )
7139 , pNext( nullptr )
7140 , flags( flags_ )
7141 , depthTestEnable( depthTestEnable_ )
7142 , depthWriteEnable( depthWriteEnable_ )
7143 , depthCompareOp( depthCompareOp_ )
7144 , depthBoundsTestEnable( depthBoundsTestEnable_ )
7145 , stencilTestEnable( stencilTestEnable_ )
7146 , front( front_ )
7147 , back( back_ )
7148 , minDepthBounds( minDepthBounds_ )
7149 , maxDepthBounds( maxDepthBounds_ )
7150 {
7151 }
7152
7153 PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs )
7154 {
7155 memcpy( this, &rhs, sizeof(PipelineDepthStencilStateCreateInfo) );
7156 }
7157
7158 PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs )
7159 {
7160 memcpy( this, &rhs, sizeof(PipelineDepthStencilStateCreateInfo) );
7161 return *this;
7162 }
7163
7164 PipelineDepthStencilStateCreateInfo& setSType( StructureType sType_ )
7165 {
7166 sType = sType_;
7167 return *this;
7168 }
7169
7170 PipelineDepthStencilStateCreateInfo& setPNext( const void* pNext_ )
7171 {
7172 pNext = pNext_;
7173 return *this;
7174 }
7175
7176 PipelineDepthStencilStateCreateInfo& setFlags( PipelineDepthStencilStateCreateFlags flags_ )
7177 {
7178 flags = flags_;
7179 return *this;
7180 }
7181
7182 PipelineDepthStencilStateCreateInfo& setDepthTestEnable( Bool32 depthTestEnable_ )
7183 {
7184 depthTestEnable = depthTestEnable_;
7185 return *this;
7186 }
7187
7188 PipelineDepthStencilStateCreateInfo& setDepthWriteEnable( Bool32 depthWriteEnable_ )
7189 {
7190 depthWriteEnable = depthWriteEnable_;
7191 return *this;
7192 }
7193
7194 PipelineDepthStencilStateCreateInfo& setDepthCompareOp( CompareOp depthCompareOp_ )
7195 {
7196 depthCompareOp = depthCompareOp_;
7197 return *this;
7198 }
7199
7200 PipelineDepthStencilStateCreateInfo& setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ )
7201 {
7202 depthBoundsTestEnable = depthBoundsTestEnable_;
7203 return *this;
7204 }
7205
7206 PipelineDepthStencilStateCreateInfo& setStencilTestEnable( Bool32 stencilTestEnable_ )
7207 {
7208 stencilTestEnable = stencilTestEnable_;
7209 return *this;
7210 }
7211
7212 PipelineDepthStencilStateCreateInfo& setFront( StencilOpState front_ )
7213 {
7214 front = front_;
7215 return *this;
7216 }
7217
7218 PipelineDepthStencilStateCreateInfo& setBack( StencilOpState back_ )
7219 {
7220 back = back_;
7221 return *this;
7222 }
7223
7224 PipelineDepthStencilStateCreateInfo& setMinDepthBounds( float minDepthBounds_ )
7225 {
7226 minDepthBounds = minDepthBounds_;
7227 return *this;
7228 }
7229
7230 PipelineDepthStencilStateCreateInfo& setMaxDepthBounds( float maxDepthBounds_ )
7231 {
7232 maxDepthBounds = maxDepthBounds_;
7233 return *this;
7234 }
7235
7236 operator const VkPipelineDepthStencilStateCreateInfo&() const
7237 {
7238 return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>(this);
7239 }
7240
7241 bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const
7242 {
7243 return ( sType == rhs.sType )
7244 && ( pNext == rhs.pNext )
7245 && ( flags == rhs.flags )
7246 && ( depthTestEnable == rhs.depthTestEnable )
7247 && ( depthWriteEnable == rhs.depthWriteEnable )
7248 && ( depthCompareOp == rhs.depthCompareOp )
7249 && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
7250 && ( stencilTestEnable == rhs.stencilTestEnable )
7251 && ( front == rhs.front )
7252 && ( back == rhs.back )
7253 && ( minDepthBounds == rhs.minDepthBounds )
7254 && ( maxDepthBounds == rhs.maxDepthBounds );
7255 }
7256
7257 bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const
7258 {
7259 return !operator==( rhs );
7260 }
7261
7262 private:
7263 StructureType sType;
7264
7265 public:
7266 const void* pNext;
7267 PipelineDepthStencilStateCreateFlags flags;
7268 Bool32 depthTestEnable;
7269 Bool32 depthWriteEnable;
7270 CompareOp depthCompareOp;
7271 Bool32 depthBoundsTestEnable;
7272 Bool32 stencilTestEnable;
7273 StencilOpState front;
7274 StencilOpState back;
7275 float minDepthBounds;
7276 float maxDepthBounds;
7277 };
7278 static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
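// Editor-added illustrative sketch: depth test and write enabled with a less-or-equal
// compare; stencil testing left disabled.
//
//   vk::PipelineDepthStencilStateCreateInfo depthStencil = vk::PipelineDepthStencilStateCreateInfo()
//     .setDepthTestEnable( VK_TRUE )
//     .setDepthWriteEnable( VK_TRUE )
//     .setDepthCompareOp( vk::CompareOp::eLessOrEqual );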
7279
7280 struct PipelineCacheCreateInfo
7281 {
7282 PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_ = PipelineCacheCreateFlags(), size_t initialDataSize_ = 0, const void* pInitialData_ = nullptr )
7283 : sType( StructureType::ePipelineCacheCreateInfo )
7284 , pNext( nullptr )
7285 , flags( flags_ )
7286 , initialDataSize( initialDataSize_ )
7287 , pInitialData( pInitialData_ )
7288 {
7289 }
7290
7291 PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs )
7292 {
7293 memcpy( this, &rhs, sizeof(PipelineCacheCreateInfo) );
7294 }
7295
7296 PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs )
7297 {
7298 memcpy( this, &rhs, sizeof(PipelineCacheCreateInfo) );
7299 return *this;
7300 }
7301
7302 PipelineCacheCreateInfo& setSType( StructureType sType_ )
7303 {
7304 sType = sType_;
7305 return *this;
7306 }
7307
7308 PipelineCacheCreateInfo& setPNext( const void* pNext_ )
7309 {
7310 pNext = pNext_;
7311 return *this;
7312 }
7313
7314 PipelineCacheCreateInfo& setFlags( PipelineCacheCreateFlags flags_ )
7315 {
7316 flags = flags_;
7317 return *this;
7318 }
7319
7320 PipelineCacheCreateInfo& setInitialDataSize( size_t initialDataSize_ )
7321 {
7322 initialDataSize = initialDataSize_;
7323 return *this;
7324 }
7325
7326 PipelineCacheCreateInfo& setPInitialData( const void* pInitialData_ )
7327 {
7328 pInitialData = pInitialData_;
7329 return *this;
7330 }
7331
7332 operator const VkPipelineCacheCreateInfo&() const
7333 {
7334 return *reinterpret_cast<const VkPipelineCacheCreateInfo*>(this);
7335 }
7336
7337 bool operator==( PipelineCacheCreateInfo const& rhs ) const
7338 {
7339 return ( sType == rhs.sType )
7340 && ( pNext == rhs.pNext )
7341 && ( flags == rhs.flags )
7342 && ( initialDataSize == rhs.initialDataSize )
7343 && ( pInitialData == rhs.pInitialData );
7344 }
7345
7346 bool operator!=( PipelineCacheCreateInfo const& rhs ) const
7347 {
7348 return !operator==( rhs );
7349 }
7350
7351 private:
7352 StructureType sType;
7353
7354 public:
7355 const void* pNext;
7356 PipelineCacheCreateFlags flags;
7357 size_t initialDataSize;
7358 const void* pInitialData;
7359 };
7360 static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
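// Editor-added illustrative sketch: an empty cache; pInitialData may instead point at a
// blob previously retrieved with getPipelineCacheData to warm the cache across runs.
//
//   vk::PipelineCacheCreateInfo cacheInfo = vk::PipelineCacheCreateInfo();
//   vk::PipelineCache pipelineCache = device.createPipelineCache( cacheInfo );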
7361
7362 struct SamplerCreateInfo
7363 {
7364 SamplerCreateInfo( SamplerCreateFlags flags_ = SamplerCreateFlags(), Filter magFilter_ = Filter::eNearest, Filter minFilter_ = Filter::eNearest, SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, float mipLodBias_ = 0, Bool32 anisotropyEnable_ = 0, float maxAnisotropy_ = 0, Bool32 compareEnable_ = 0, CompareOp compareOp_ = CompareOp::eNever, float minLod_ = 0, float maxLod_ = 0, BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, Bool32 unnormalizedCoordinates_ = 0 )
7365 : sType( StructureType::eSamplerCreateInfo )
7366 , pNext( nullptr )
7367 , flags( flags_ )
7368 , magFilter( magFilter_ )
7369 , minFilter( minFilter_ )
7370 , mipmapMode( mipmapMode_ )
7371 , addressModeU( addressModeU_ )
7372 , addressModeV( addressModeV_ )
7373 , addressModeW( addressModeW_ )
7374 , mipLodBias( mipLodBias_ )
7375 , anisotropyEnable( anisotropyEnable_ )
7376 , maxAnisotropy( maxAnisotropy_ )
7377 , compareEnable( compareEnable_ )
7378 , compareOp( compareOp_ )
7379 , minLod( minLod_ )
7380 , maxLod( maxLod_ )
7381 , borderColor( borderColor_ )
7382 , unnormalizedCoordinates( unnormalizedCoordinates_ )
7383 {
7384 }
7385
7386 SamplerCreateInfo( VkSamplerCreateInfo const & rhs )
7387 {
7388 memcpy( this, &rhs, sizeof(SamplerCreateInfo) );
7389 }
7390
7391 SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs )
7392 {
7393 memcpy( this, &rhs, sizeof(SamplerCreateInfo) );
7394 return *this;
7395 }
7396
7397 SamplerCreateInfo& setSType( StructureType sType_ )
7398 {
7399 sType = sType_;
7400 return *this;
7401 }
7402
7403 SamplerCreateInfo& setPNext( const void* pNext_ )
7404 {
7405 pNext = pNext_;
7406 return *this;
7407 }
7408
7409 SamplerCreateInfo& setFlags( SamplerCreateFlags flags_ )
7410 {
7411 flags = flags_;
7412 return *this;
7413 }
7414
7415 SamplerCreateInfo& setMagFilter( Filter magFilter_ )
7416 {
7417 magFilter = magFilter_;
7418 return *this;
7419 }
7420
7421 SamplerCreateInfo& setMinFilter( Filter minFilter_ )
7422 {
7423 minFilter = minFilter_;
7424 return *this;
7425 }
7426
7427 SamplerCreateInfo& setMipmapMode( SamplerMipmapMode mipmapMode_ )
7428 {
7429 mipmapMode = mipmapMode_;
7430 return *this;
7431 }
7432
7433 SamplerCreateInfo& setAddressModeU( SamplerAddressMode addressModeU_ )
7434 {
7435 addressModeU = addressModeU_;
7436 return *this;
7437 }
7438
7439 SamplerCreateInfo& setAddressModeV( SamplerAddressMode addressModeV_ )
7440 {
7441 addressModeV = addressModeV_;
7442 return *this;
7443 }
7444
7445 SamplerCreateInfo& setAddressModeW( SamplerAddressMode addressModeW_ )
7446 {
7447 addressModeW = addressModeW_;
7448 return *this;
7449 }
7450
7451 SamplerCreateInfo& setMipLodBias( float mipLodBias_ )
7452 {
7453 mipLodBias = mipLodBias_;
7454 return *this;
7455 }
7456
7457 SamplerCreateInfo& setAnisotropyEnable( Bool32 anisotropyEnable_ )
7458 {
7459 anisotropyEnable = anisotropyEnable_;
7460 return *this;
7461 }
7462
7463 SamplerCreateInfo& setMaxAnisotropy( float maxAnisotropy_ )
7464 {
7465 maxAnisotropy = maxAnisotropy_;
7466 return *this;
7467 }
7468
7469 SamplerCreateInfo& setCompareEnable( Bool32 compareEnable_ )
7470 {
7471 compareEnable = compareEnable_;
7472 return *this;
7473 }
7474
7475 SamplerCreateInfo& setCompareOp( CompareOp compareOp_ )
7476 {
7477 compareOp = compareOp_;
7478 return *this;
7479 }
7480
7481 SamplerCreateInfo& setMinLod( float minLod_ )
7482 {
7483 minLod = minLod_;
7484 return *this;
7485 }
7486
7487 SamplerCreateInfo& setMaxLod( float maxLod_ )
7488 {
7489 maxLod = maxLod_;
7490 return *this;
7491 }
7492
7493 SamplerCreateInfo& setBorderColor( BorderColor borderColor_ )
7494 {
7495 borderColor = borderColor_;
7496 return *this;
7497 }
7498
7499 SamplerCreateInfo& setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ )
7500 {
7501 unnormalizedCoordinates = unnormalizedCoordinates_;
7502 return *this;
7503 }
7504
7505 operator const VkSamplerCreateInfo&() const
7506 {
7507 return *reinterpret_cast<const VkSamplerCreateInfo*>(this);
7508 }
7509
7510 bool operator==( SamplerCreateInfo const& rhs ) const
7511 {
7512 return ( sType == rhs.sType )
7513 && ( pNext == rhs.pNext )
7514 && ( flags == rhs.flags )
7515 && ( magFilter == rhs.magFilter )
7516 && ( minFilter == rhs.minFilter )
7517 && ( mipmapMode == rhs.mipmapMode )
7518 && ( addressModeU == rhs.addressModeU )
7519 && ( addressModeV == rhs.addressModeV )
7520 && ( addressModeW == rhs.addressModeW )
7521 && ( mipLodBias == rhs.mipLodBias )
7522 && ( anisotropyEnable == rhs.anisotropyEnable )
7523 && ( maxAnisotropy == rhs.maxAnisotropy )
7524 && ( compareEnable == rhs.compareEnable )
7525 && ( compareOp == rhs.compareOp )
7526 && ( minLod == rhs.minLod )
7527 && ( maxLod == rhs.maxLod )
7528 && ( borderColor == rhs.borderColor )
7529 && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
7530 }
7531
7532 bool operator!=( SamplerCreateInfo const& rhs ) const
7533 {
7534 return !operator==( rhs );
7535 }
7536
7537 private:
7538 StructureType sType;
7539
7540 public:
7541 const void* pNext;
7542 SamplerCreateFlags flags;
7543 Filter magFilter;
7544 Filter minFilter;
7545 SamplerMipmapMode mipmapMode;
7546 SamplerAddressMode addressModeU;
7547 SamplerAddressMode addressModeV;
7548 SamplerAddressMode addressModeW;
7549 float mipLodBias;
7550 Bool32 anisotropyEnable;
7551 float maxAnisotropy;
7552 Bool32 compareEnable;
7553 CompareOp compareOp;
7554 float minLod;
7555 float maxLod;
7556 BorderColor borderColor;
7557 Bool32 unnormalizedCoordinates;
7558 };
7559 static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
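  // Illustrative sketch, not part of the generated API: a trilinear sampler description built
  // with the setters above; only the fields that differ from the constructor defaults need to
  // be touched, and the chosen values are arbitrary.
  //
  //   vk::SamplerCreateInfo samplerInfo = vk::SamplerCreateInfo()
  //     .setMagFilter( vk::Filter::eLinear )
  //     .setMinFilter( vk::Filter::eLinear )
  //     .setMipmapMode( vk::SamplerMipmapMode::eLinear )
  //     .setMaxLod( 8.0f );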
7560
7561 struct CommandBufferAllocateInfo
7562 {
7563 CommandBufferAllocateInfo( CommandPool commandPool_ = CommandPool(), CommandBufferLevel level_ = CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = 0 )
7564 : sType( StructureType::eCommandBufferAllocateInfo )
7565 , pNext( nullptr )
7566 , commandPool( commandPool_ )
7567 , level( level_ )
7568 , commandBufferCount( commandBufferCount_ )
7569 {
7570 }
7571
7572 CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs )
7573 {
7574 memcpy( this, &rhs, sizeof(CommandBufferAllocateInfo) );
7575 }
7576
7577 CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs )
7578 {
7579 memcpy( this, &rhs, sizeof(CommandBufferAllocateInfo) );
7580 return *this;
7581 }
7582
7583 CommandBufferAllocateInfo& setSType( StructureType sType_ )
7584 {
7585 sType = sType_;
7586 return *this;
7587 }
7588
7589 CommandBufferAllocateInfo& setPNext( const void* pNext_ )
7590 {
7591 pNext = pNext_;
7592 return *this;
7593 }
7594
7595 CommandBufferAllocateInfo& setCommandPool( CommandPool commandPool_ )
7596 {
7597 commandPool = commandPool_;
7598 return *this;
7599 }
7600
7601 CommandBufferAllocateInfo& setLevel( CommandBufferLevel level_ )
7602 {
7603 level = level_;
7604 return *this;
7605 }
7606
7607 CommandBufferAllocateInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
7608 {
7609 commandBufferCount = commandBufferCount_;
7610 return *this;
7611 }
7612
7613 operator const VkCommandBufferAllocateInfo&() const
7614 {
7615 return *reinterpret_cast<const VkCommandBufferAllocateInfo*>(this);
7616 }
7617
7618 bool operator==( CommandBufferAllocateInfo const& rhs ) const
7619 {
7620 return ( sType == rhs.sType )
7621 && ( pNext == rhs.pNext )
7622 && ( commandPool == rhs.commandPool )
7623 && ( level == rhs.level )
7624 && ( commandBufferCount == rhs.commandBufferCount );
7625 }
7626
7627 bool operator!=( CommandBufferAllocateInfo const& rhs ) const
7628 {
7629 return !operator==( rhs );
7630 }
7631
7632 private:
7633 StructureType sType;
7634
7635 public:
7636 const void* pNext;
7637 CommandPool commandPool;
7638 CommandBufferLevel level;
7639 uint32_t commandBufferCount;
7640 };
7641 static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
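  // Illustrative sketch, not part of the generated API: requesting two primary command buffers
  // from an existing pool; "pool" is a hypothetical vk::CommandPool handle.
  //
  //   vk::CommandBufferAllocateInfo allocInfo = vk::CommandBufferAllocateInfo()
  //     .setCommandPool( pool )
  //     .setLevel( vk::CommandBufferLevel::ePrimary )
  //     .setCommandBufferCount( 2 );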
7642
7643 struct RenderPassBeginInfo
7644 {
7645 RenderPassBeginInfo( RenderPass renderPass_ = RenderPass(), Framebuffer framebuffer_ = Framebuffer(), Rect2D renderArea_ = Rect2D(), uint32_t clearValueCount_ = 0, const ClearValue* pClearValues_ = nullptr )
7646 : sType( StructureType::eRenderPassBeginInfo )
7647 , pNext( nullptr )
7648 , renderPass( renderPass_ )
7649 , framebuffer( framebuffer_ )
7650 , renderArea( renderArea_ )
7651 , clearValueCount( clearValueCount_ )
7652 , pClearValues( pClearValues_ )
7653 {
7654 }
7655
7656 RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs )
7657 {
7658 memcpy( this, &rhs, sizeof(RenderPassBeginInfo) );
7659 }
7660
7661 RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs )
7662 {
7663 memcpy( this, &rhs, sizeof(RenderPassBeginInfo) );
7664 return *this;
7665 }
7666
7667 RenderPassBeginInfo& setSType( StructureType sType_ )
7668 {
7669 sType = sType_;
7670 return *this;
7671 }
7672
7673 RenderPassBeginInfo& setPNext( const void* pNext_ )
7674 {
7675 pNext = pNext_;
7676 return *this;
7677 }
7678
7679 RenderPassBeginInfo& setRenderPass( RenderPass renderPass_ )
7680 {
7681 renderPass = renderPass_;
7682 return *this;
7683 }
7684
7685 RenderPassBeginInfo& setFramebuffer( Framebuffer framebuffer_ )
7686 {
7687 framebuffer = framebuffer_;
7688 return *this;
7689 }
7690
7691 RenderPassBeginInfo& setRenderArea( Rect2D renderArea_ )
7692 {
7693 renderArea = renderArea_;
7694 return *this;
7695 }
7696
7697 RenderPassBeginInfo& setClearValueCount( uint32_t clearValueCount_ )
7698 {
7699 clearValueCount = clearValueCount_;
7700 return *this;
7701 }
7702
7703 RenderPassBeginInfo& setPClearValues( const ClearValue* pClearValues_ )
7704 {
7705 pClearValues = pClearValues_;
7706 return *this;
7707 }
7708
7709 operator const VkRenderPassBeginInfo&() const
7710 {
7711 return *reinterpret_cast<const VkRenderPassBeginInfo*>(this);
7712 }
7713
7714 bool operator==( RenderPassBeginInfo const& rhs ) const
7715 {
7716 return ( sType == rhs.sType )
7717 && ( pNext == rhs.pNext )
7718 && ( renderPass == rhs.renderPass )
7719 && ( framebuffer == rhs.framebuffer )
7720 && ( renderArea == rhs.renderArea )
7721 && ( clearValueCount == rhs.clearValueCount )
7722 && ( pClearValues == rhs.pClearValues );
7723 }
7724
7725 bool operator!=( RenderPassBeginInfo const& rhs ) const
7726 {
7727 return !operator==( rhs );
7728 }
7729
7730 private:
7731 StructureType sType;
7732
7733 public:
7734 const void* pNext;
7735 RenderPass renderPass;
7736 Framebuffer framebuffer;
7737 Rect2D renderArea;
7738 uint32_t clearValueCount;
7739 const ClearValue* pClearValues;
7740 };
7741 static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
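  // Illustrative sketch, not part of the generated API: beginning a render pass over a
  // previously computed render area with a single clear value; "renderPass", "framebuffer",
  // "renderArea" and "clearValue" are hypothetical.
  //
  //   vk::RenderPassBeginInfo beginInfo = vk::RenderPassBeginInfo()
  //     .setRenderPass( renderPass )
  //     .setFramebuffer( framebuffer )
  //     .setRenderArea( renderArea )
  //     .setClearValueCount( 1 )
  //     .setPClearValues( &clearValue );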
7742
7743 struct EventCreateInfo
7744 {
7745 EventCreateInfo( EventCreateFlags flags_ = EventCreateFlags() )
7746 : sType( StructureType::eEventCreateInfo )
7747 , pNext( nullptr )
7748 , flags( flags_ )
7749 {
7750 }
7751
7752 EventCreateInfo( VkEventCreateInfo const & rhs )
7753 {
7754 memcpy( this, &rhs, sizeof(EventCreateInfo) );
7755 }
7756
7757 EventCreateInfo& operator=( VkEventCreateInfo const & rhs )
7758 {
7759 memcpy( this, &rhs, sizeof(EventCreateInfo) );
7760 return *this;
7761 }
7762
7763 EventCreateInfo& setSType( StructureType sType_ )
7764 {
7765 sType = sType_;
7766 return *this;
7767 }
7768
7769 EventCreateInfo& setPNext( const void* pNext_ )
7770 {
7771 pNext = pNext_;
7772 return *this;
7773 }
7774
7775 EventCreateInfo& setFlags( EventCreateFlags flags_ )
7776 {
7777 flags = flags_;
7778 return *this;
7779 }
7780
7781 operator const VkEventCreateInfo&() const
7782 {
7783 return *reinterpret_cast<const VkEventCreateInfo*>(this);
7784 }
7785
7786 bool operator==( EventCreateInfo const& rhs ) const
7787 {
7788 return ( sType == rhs.sType )
7789 && ( pNext == rhs.pNext )
7790 && ( flags == rhs.flags );
7791 }
7792
7793 bool operator!=( EventCreateInfo const& rhs ) const
7794 {
7795 return !operator==( rhs );
7796 }
7797
7798 private:
7799 StructureType sType;
7800
7801 public:
7802 const void* pNext;
7803 EventCreateFlags flags;
7804 };
7805 static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
7806
7807 struct SemaphoreCreateInfo
7808 {
7809 SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = SemaphoreCreateFlags() )
7810 : sType( StructureType::eSemaphoreCreateInfo )
7811 , pNext( nullptr )
7812 , flags( flags_ )
7813 {
7814 }
7815
7816 SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs )
7817 {
7818 memcpy( this, &rhs, sizeof(SemaphoreCreateInfo) );
7819 }
7820
7821 SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs )
7822 {
7823 memcpy( this, &rhs, sizeof(SemaphoreCreateInfo) );
7824 return *this;
7825 }
7826
7827 SemaphoreCreateInfo& setSType( StructureType sType_ )
7828 {
7829 sType = sType_;
7830 return *this;
7831 }
7832
7833 SemaphoreCreateInfo& setPNext( const void* pNext_ )
7834 {
7835 pNext = pNext_;
7836 return *this;
7837 }
7838
7839 SemaphoreCreateInfo& setFlags( SemaphoreCreateFlags flags_ )
7840 {
7841 flags = flags_;
7842 return *this;
7843 }
7844
7845 operator const VkSemaphoreCreateInfo&() const
7846 {
7847 return *reinterpret_cast<const VkSemaphoreCreateInfo*>(this);
7848 }
7849
7850 bool operator==( SemaphoreCreateInfo const& rhs ) const
7851 {
7852 return ( sType == rhs.sType )
7853 && ( pNext == rhs.pNext )
7854 && ( flags == rhs.flags );
7855 }
7856
7857 bool operator!=( SemaphoreCreateInfo const& rhs ) const
7858 {
7859 return !operator==( rhs );
7860 }
7861
7862 private:
7863 StructureType sType;
7864
7865 public:
7866 const void* pNext;
7867 SemaphoreCreateFlags flags;
7868 };
7869 static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
7870
7871 struct FramebufferCreateInfo
7872 {
7873 FramebufferCreateInfo( FramebufferCreateFlags flags_ = FramebufferCreateFlags(), RenderPass renderPass_ = RenderPass(), uint32_t attachmentCount_ = 0, const ImageView* pAttachments_ = nullptr, uint32_t width_ = 0, uint32_t height_ = 0, uint32_t layers_ = 0 )
7874 : sType( StructureType::eFramebufferCreateInfo )
7875 , pNext( nullptr )
7876 , flags( flags_ )
7877 , renderPass( renderPass_ )
7878 , attachmentCount( attachmentCount_ )
7879 , pAttachments( pAttachments_ )
7880 , width( width_ )
7881 , height( height_ )
7882 , layers( layers_ )
7883 {
7884 }
7885
7886 FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs )
7887 {
7888 memcpy( this, &rhs, sizeof(FramebufferCreateInfo) );
7889 }
7890
7891 FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs )
7892 {
7893 memcpy( this, &rhs, sizeof(FramebufferCreateInfo) );
7894 return *this;
7895 }
7896
7897 FramebufferCreateInfo& setSType( StructureType sType_ )
7898 {
7899 sType = sType_;
7900 return *this;
7901 }
7902
7903 FramebufferCreateInfo& setPNext( const void* pNext_ )
7904 {
7905 pNext = pNext_;
7906 return *this;
7907 }
7908
7909 FramebufferCreateInfo& setFlags( FramebufferCreateFlags flags_ )
7910 {
7911 flags = flags_;
7912 return *this;
7913 }
7914
7915 FramebufferCreateInfo& setRenderPass( RenderPass renderPass_ )
7916 {
7917 renderPass = renderPass_;
7918 return *this;
7919 }
7920
7921 FramebufferCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
7922 {
7923 attachmentCount = attachmentCount_;
7924 return *this;
7925 }
7926
7927 FramebufferCreateInfo& setPAttachments( const ImageView* pAttachments_ )
7928 {
7929 pAttachments = pAttachments_;
7930 return *this;
7931 }
7932
7933 FramebufferCreateInfo& setWidth( uint32_t width_ )
7934 {
7935 width = width_;
7936 return *this;
7937 }
7938
7939 FramebufferCreateInfo& setHeight( uint32_t height_ )
7940 {
7941 height = height_;
7942 return *this;
7943 }
7944
7945 FramebufferCreateInfo& setLayers( uint32_t layers_ )
7946 {
7947 layers = layers_;
7948 return *this;
7949 }
7950
7951 operator const VkFramebufferCreateInfo&() const
7952 {
7953 return *reinterpret_cast<const VkFramebufferCreateInfo*>(this);
7954 }
7955
7956 bool operator==( FramebufferCreateInfo const& rhs ) const
7957 {
7958 return ( sType == rhs.sType )
7959 && ( pNext == rhs.pNext )
7960 && ( flags == rhs.flags )
7961 && ( renderPass == rhs.renderPass )
7962 && ( attachmentCount == rhs.attachmentCount )
7963 && ( pAttachments == rhs.pAttachments )
7964 && ( width == rhs.width )
7965 && ( height == rhs.height )
7966 && ( layers == rhs.layers );
7967 }
7968
7969 bool operator!=( FramebufferCreateInfo const& rhs ) const
7970 {
7971 return !operator==( rhs );
7972 }
7973
7974 private:
7975 StructureType sType;
7976
7977 public:
7978 const void* pNext;
7979 FramebufferCreateFlags flags;
7980 RenderPass renderPass;
7981 uint32_t attachmentCount;
7982 const ImageView* pAttachments;
7983 uint32_t width;
7984 uint32_t height;
7985 uint32_t layers;
7986 };
7987 static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
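  // Illustrative sketch, not part of the generated API: a one-attachment framebuffer;
  // "renderPass" and "colorView" are hypothetical handles, and the dimensions are arbitrary.
  //
  //   vk::FramebufferCreateInfo fbInfo = vk::FramebufferCreateInfo()
  //     .setRenderPass( renderPass )
  //     .setAttachmentCount( 1 )
  //     .setPAttachments( &colorView )
  //     .setWidth( 1280 )
  //     .setHeight( 720 )
  //     .setLayers( 1 );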
7988
7989 struct DisplayModeCreateInfoKHR
7990 {
7991 DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = DisplayModeCreateFlagsKHR(), DisplayModeParametersKHR parameters_ = DisplayModeParametersKHR() )
7992 : sType( StructureType::eDisplayModeCreateInfoKHR )
7993 , pNext( nullptr )
7994 , flags( flags_ )
7995 , parameters( parameters_ )
7996 {
7997 }
7998
7999 DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs )
8000 {
8001 memcpy( this, &rhs, sizeof(DisplayModeCreateInfoKHR) );
8002 }
8003
8004 DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs )
8005 {
8006 memcpy( this, &rhs, sizeof(DisplayModeCreateInfoKHR) );
8007 return *this;
8008 }
8009
8010 DisplayModeCreateInfoKHR& setSType( StructureType sType_ )
8011 {
8012 sType = sType_;
8013 return *this;
8014 }
8015
8016 DisplayModeCreateInfoKHR& setPNext( const void* pNext_ )
8017 {
8018 pNext = pNext_;
8019 return *this;
8020 }
8021
8022 DisplayModeCreateInfoKHR& setFlags( DisplayModeCreateFlagsKHR flags_ )
8023 {
8024 flags = flags_;
8025 return *this;
8026 }
8027
8028 DisplayModeCreateInfoKHR& setParameters( DisplayModeParametersKHR parameters_ )
8029 {
8030 parameters = parameters_;
8031 return *this;
8032 }
8033
8034 operator const VkDisplayModeCreateInfoKHR&() const
8035 {
8036 return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>(this);
8037 }
8038
8039 bool operator==( DisplayModeCreateInfoKHR const& rhs ) const
8040 {
8041 return ( sType == rhs.sType )
8042 && ( pNext == rhs.pNext )
8043 && ( flags == rhs.flags )
8044 && ( parameters == rhs.parameters );
8045 }
8046
8047 bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const
8048 {
8049 return !operator==( rhs );
8050 }
8051
8052 private:
8053 StructureType sType;
8054
8055 public:
8056 const void* pNext;
8057 DisplayModeCreateFlagsKHR flags;
8058 DisplayModeParametersKHR parameters;
8059 };
8060 static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
8061
8062 struct DisplayPresentInfoKHR
8063 {
8064 DisplayPresentInfoKHR( Rect2D srcRect_ = Rect2D(), Rect2D dstRect_ = Rect2D(), Bool32 persistent_ = 0 )
8065 : sType( StructureType::eDisplayPresentInfoKHR )
8066 , pNext( nullptr )
8067 , srcRect( srcRect_ )
8068 , dstRect( dstRect_ )
8069 , persistent( persistent_ )
8070 {
8071 }
8072
8073 DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs )
8074 {
8075 memcpy( this, &rhs, sizeof(DisplayPresentInfoKHR) );
8076 }
8077
8078 DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs )
8079 {
8080 memcpy( this, &rhs, sizeof(DisplayPresentInfoKHR) );
8081 return *this;
8082 }
8083
8084 DisplayPresentInfoKHR& setSType( StructureType sType_ )
8085 {
8086 sType = sType_;
8087 return *this;
8088 }
8089
8090 DisplayPresentInfoKHR& setPNext( const void* pNext_ )
8091 {
8092 pNext = pNext_;
8093 return *this;
8094 }
8095
8096 DisplayPresentInfoKHR& setSrcRect( Rect2D srcRect_ )
8097 {
8098 srcRect = srcRect_;
8099 return *this;
8100 }
8101
8102 DisplayPresentInfoKHR& setDstRect( Rect2D dstRect_ )
8103 {
8104 dstRect = dstRect_;
8105 return *this;
8106 }
8107
8108 DisplayPresentInfoKHR& setPersistent( Bool32 persistent_ )
8109 {
8110 persistent = persistent_;
8111 return *this;
8112 }
8113
8114 operator const VkDisplayPresentInfoKHR&() const
8115 {
8116 return *reinterpret_cast<const VkDisplayPresentInfoKHR*>(this);
8117 }
8118
8119 bool operator==( DisplayPresentInfoKHR const& rhs ) const
8120 {
8121 return ( sType == rhs.sType )
8122 && ( pNext == rhs.pNext )
8123 && ( srcRect == rhs.srcRect )
8124 && ( dstRect == rhs.dstRect )
8125 && ( persistent == rhs.persistent );
8126 }
8127
8128 bool operator!=( DisplayPresentInfoKHR const& rhs ) const
8129 {
8130 return !operator==( rhs );
8131 }
8132
8133 private:
8134 StructureType sType;
8135
8136 public:
8137 const void* pNext;
8138 Rect2D srcRect;
8139 Rect2D dstRect;
8140 Bool32 persistent;
8141 };
8142 static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
8143
8144#ifdef VK_USE_PLATFORM_ANDROID_KHR
8145 struct AndroidSurfaceCreateInfoKHR
8146 {
8147 AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = AndroidSurfaceCreateFlagsKHR(), ANativeWindow* window_ = nullptr )
8148 : sType( StructureType::eAndroidSurfaceCreateInfoKHR )
8149 , pNext( nullptr )
8150 , flags( flags_ )
8151 , window( window_ )
8152 {
8153 }
8154
8155 AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs )
8156 {
8157 memcpy( this, &rhs, sizeof(AndroidSurfaceCreateInfoKHR) );
8158 }
8159
8160 AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs )
8161 {
8162 memcpy( this, &rhs, sizeof(AndroidSurfaceCreateInfoKHR) );
8163 return *this;
8164 }
8165
8166 AndroidSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8167 {
8168 sType = sType_;
8169 return *this;
8170 }
8171
8172 AndroidSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8173 {
8174 pNext = pNext_;
8175 return *this;
8176 }
8177
8178 AndroidSurfaceCreateInfoKHR& setFlags( AndroidSurfaceCreateFlagsKHR flags_ )
8179 {
8180 flags = flags_;
8181 return *this;
8182 }
8183
8184 AndroidSurfaceCreateInfoKHR& setWindow( ANativeWindow* window_ )
8185 {
8186 window = window_;
8187 return *this;
8188 }
8189
8190 operator const VkAndroidSurfaceCreateInfoKHR&() const
8191 {
8192 return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>(this);
8193 }
8194
8195 bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const
8196 {
8197 return ( sType == rhs.sType )
8198 && ( pNext == rhs.pNext )
8199 && ( flags == rhs.flags )
8200 && ( window == rhs.window );
8201 }
8202
8203 bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const
8204 {
8205 return !operator==( rhs );
8206 }
8207
8208 private:
8209 StructureType sType;
8210
8211 public:
8212 const void* pNext;
8213 AndroidSurfaceCreateFlagsKHR flags;
8214 ANativeWindow* window;
8215 };
8216 static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8217#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
8218
8219#ifdef VK_USE_PLATFORM_MIR_KHR
8220 struct MirSurfaceCreateInfoKHR
8221 {
8222 MirSurfaceCreateInfoKHR( MirSurfaceCreateFlagsKHR flags_ = MirSurfaceCreateFlagsKHR(), MirConnection* connection_ = nullptr, MirSurface* mirSurface_ = nullptr )
8223 : sType( StructureType::eMirSurfaceCreateInfoKHR )
8224 , pNext( nullptr )
8225 , flags( flags_ )
8226 , connection( connection_ )
8227 , mirSurface( mirSurface_ )
8228 {
8229 }
8230
8231 MirSurfaceCreateInfoKHR( VkMirSurfaceCreateInfoKHR const & rhs )
8232 {
8233 memcpy( this, &rhs, sizeof(MirSurfaceCreateInfoKHR) );
8234 }
8235
8236 MirSurfaceCreateInfoKHR& operator=( VkMirSurfaceCreateInfoKHR const & rhs )
8237 {
8238 memcpy( this, &rhs, sizeof(MirSurfaceCreateInfoKHR) );
8239 return *this;
8240 }
8241
8242 MirSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8243 {
8244 sType = sType_;
8245 return *this;
8246 }
8247
8248 MirSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8249 {
8250 pNext = pNext_;
8251 return *this;
8252 }
8253
8254 MirSurfaceCreateInfoKHR& setFlags( MirSurfaceCreateFlagsKHR flags_ )
8255 {
8256 flags = flags_;
8257 return *this;
8258 }
8259
8260 MirSurfaceCreateInfoKHR& setConnection( MirConnection* connection_ )
8261 {
8262 connection = connection_;
8263 return *this;
8264 }
8265
8266 MirSurfaceCreateInfoKHR& setMirSurface( MirSurface* mirSurface_ )
8267 {
8268 mirSurface = mirSurface_;
8269 return *this;
8270 }
8271
8272 operator const VkMirSurfaceCreateInfoKHR&() const
8273 {
8274 return *reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>(this);
8275 }
8276
8277 bool operator==( MirSurfaceCreateInfoKHR const& rhs ) const
8278 {
8279 return ( sType == rhs.sType )
8280 && ( pNext == rhs.pNext )
8281 && ( flags == rhs.flags )
8282 && ( connection == rhs.connection )
8283 && ( mirSurface == rhs.mirSurface );
8284 }
8285
8286 bool operator!=( MirSurfaceCreateInfoKHR const& rhs ) const
8287 {
8288 return !operator==( rhs );
8289 }
8290
8291 private:
8292 StructureType sType;
8293
8294 public:
8295 const void* pNext;
8296 MirSurfaceCreateFlagsKHR flags;
8297 MirConnection* connection;
8298 MirSurface* mirSurface;
8299 };
8300 static_assert( sizeof( MirSurfaceCreateInfoKHR ) == sizeof( VkMirSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8301#endif /*VK_USE_PLATFORM_MIR_KHR*/
8302
8303#ifdef VK_USE_PLATFORM_WAYLAND_KHR
8304 struct WaylandSurfaceCreateInfoKHR
8305 {
8306 WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_ = WaylandSurfaceCreateFlagsKHR(), struct wl_display* display_ = nullptr, struct wl_surface* surface_ = nullptr )
8307 : sType( StructureType::eWaylandSurfaceCreateInfoKHR )
8308 , pNext( nullptr )
8309 , flags( flags_ )
8310 , display( display_ )
8311 , surface( surface_ )
8312 {
8313 }
8314
8315 WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs )
8316 {
8317 memcpy( this, &rhs, sizeof(WaylandSurfaceCreateInfoKHR) );
8318 }
8319
8320 WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs )
8321 {
8322 memcpy( this, &rhs, sizeof(WaylandSurfaceCreateInfoKHR) );
8323 return *this;
8324 }
8325
8326 WaylandSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8327 {
8328 sType = sType_;
8329 return *this;
8330 }
8331
8332 WaylandSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8333 {
8334 pNext = pNext_;
8335 return *this;
8336 }
8337
8338 WaylandSurfaceCreateInfoKHR& setFlags( WaylandSurfaceCreateFlagsKHR flags_ )
8339 {
8340 flags = flags_;
8341 return *this;
8342 }
8343
8344 WaylandSurfaceCreateInfoKHR& setDisplay( struct wl_display* display_ )
8345 {
8346 display = display_;
8347 return *this;
8348 }
8349
8350 WaylandSurfaceCreateInfoKHR& setSurface( struct wl_surface* surface_ )
8351 {
8352 surface = surface_;
8353 return *this;
8354 }
8355
8356 operator const VkWaylandSurfaceCreateInfoKHR&() const
8357 {
8358 return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>(this);
8359 }
8360
8361 bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const
8362 {
8363 return ( sType == rhs.sType )
8364 && ( pNext == rhs.pNext )
8365 && ( flags == rhs.flags )
8366 && ( display == rhs.display )
8367 && ( surface == rhs.surface );
8368 }
8369
8370 bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const
8371 {
8372 return !operator==( rhs );
8373 }
8374
8375 private:
8376 StructureType sType;
8377
8378 public:
8379 const void* pNext;
8380 WaylandSurfaceCreateFlagsKHR flags;
8381 struct wl_display* display;
8382 struct wl_surface* surface;
8383 };
8384 static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8385#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
8386
8387#ifdef VK_USE_PLATFORM_WIN32_KHR
8388 struct Win32SurfaceCreateInfoKHR
8389 {
8390 Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_ = Win32SurfaceCreateFlagsKHR(), HINSTANCE hinstance_ = 0, HWND hwnd_ = 0 )
8391 : sType( StructureType::eWin32SurfaceCreateInfoKHR )
8392 , pNext( nullptr )
8393 , flags( flags_ )
8394 , hinstance( hinstance_ )
8395 , hwnd( hwnd_ )
8396 {
8397 }
8398
8399 Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs )
8400 {
8401 memcpy( this, &rhs, sizeof(Win32SurfaceCreateInfoKHR) );
8402 }
8403
8404 Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs )
8405 {
8406 memcpy( this, &rhs, sizeof(Win32SurfaceCreateInfoKHR) );
8407 return *this;
8408 }
8409
8410 Win32SurfaceCreateInfoKHR& setSType( StructureType sType_ )
8411 {
8412 sType = sType_;
8413 return *this;
8414 }
8415
8416 Win32SurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8417 {
8418 pNext = pNext_;
8419 return *this;
8420 }
8421
8422 Win32SurfaceCreateInfoKHR& setFlags( Win32SurfaceCreateFlagsKHR flags_ )
8423 {
8424 flags = flags_;
8425 return *this;
8426 }
8427
8428 Win32SurfaceCreateInfoKHR& setHinstance( HINSTANCE hinstance_ )
8429 {
8430 hinstance = hinstance_;
8431 return *this;
8432 }
8433
8434 Win32SurfaceCreateInfoKHR& setHwnd( HWND hwnd_ )
8435 {
8436 hwnd = hwnd_;
8437 return *this;
8438 }
8439
8440 operator const VkWin32SurfaceCreateInfoKHR&() const
8441 {
8442 return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>(this);
8443 }
8444
8445 bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const
8446 {
8447 return ( sType == rhs.sType )
8448 && ( pNext == rhs.pNext )
8449 && ( flags == rhs.flags )
8450 && ( hinstance == rhs.hinstance )
8451 && ( hwnd == rhs.hwnd );
8452 }
8453
8454 bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const
8455 {
8456 return !operator==( rhs );
8457 }
8458
8459 private:
8460 StructureType sType;
8461
8462 public:
8463 const void* pNext;
8464 Win32SurfaceCreateFlagsKHR flags;
8465 HINSTANCE hinstance;
8466 HWND hwnd;
8467 };
8468 static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8469#endif /*VK_USE_PLATFORM_WIN32_KHR*/
8470
8471#ifdef VK_USE_PLATFORM_XLIB_KHR
8472 struct XlibSurfaceCreateInfoKHR
8473 {
8474 XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_ = XlibSurfaceCreateFlagsKHR(), Display* dpy_ = nullptr, Window window_ = 0 )
8475 : sType( StructureType::eXlibSurfaceCreateInfoKHR )
8476 , pNext( nullptr )
8477 , flags( flags_ )
8478 , dpy( dpy_ )
8479 , window( window_ )
8480 {
8481 }
8482
8483 XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs )
8484 {
8485 memcpy( this, &rhs, sizeof(XlibSurfaceCreateInfoKHR) );
8486 }
8487
8488 XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs )
8489 {
8490 memcpy( this, &rhs, sizeof(XlibSurfaceCreateInfoKHR) );
8491 return *this;
8492 }
8493
8494 XlibSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8495 {
8496 sType = sType_;
8497 return *this;
8498 }
8499
8500 XlibSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8501 {
8502 pNext = pNext_;
8503 return *this;
8504 }
8505
8506 XlibSurfaceCreateInfoKHR& setFlags( XlibSurfaceCreateFlagsKHR flags_ )
8507 {
8508 flags = flags_;
8509 return *this;
8510 }
8511
8512 XlibSurfaceCreateInfoKHR& setDpy( Display* dpy_ )
8513 {
8514 dpy = dpy_;
8515 return *this;
8516 }
8517
8518 XlibSurfaceCreateInfoKHR& setWindow( Window window_ )
8519 {
8520 window = window_;
8521 return *this;
8522 }
8523
8524 operator const VkXlibSurfaceCreateInfoKHR&() const
8525 {
8526 return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>(this);
8527 }
8528
8529 bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const
8530 {
8531 return ( sType == rhs.sType )
8532 && ( pNext == rhs.pNext )
8533 && ( flags == rhs.flags )
8534 && ( dpy == rhs.dpy )
8535 && ( window == rhs.window );
8536 }
8537
8538 bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const
8539 {
8540 return !operator==( rhs );
8541 }
8542
8543 private:
8544 StructureType sType;
8545
8546 public:
8547 const void* pNext;
8548 XlibSurfaceCreateFlagsKHR flags;
8549 Display* dpy;
8550 Window window;
8551 };
8552 static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8553#endif /*VK_USE_PLATFORM_XLIB_KHR*/
8554
8555#ifdef VK_USE_PLATFORM_XCB_KHR
8556 struct XcbSurfaceCreateInfoKHR
8557 {
8558 XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_ = XcbSurfaceCreateFlagsKHR(), xcb_connection_t* connection_ = nullptr, xcb_window_t window_ = 0 )
8559 : sType( StructureType::eXcbSurfaceCreateInfoKHR )
8560 , pNext( nullptr )
8561 , flags( flags_ )
8562 , connection( connection_ )
8563 , window( window_ )
8564 {
8565 }
8566
8567 XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs )
8568 {
8569 memcpy( this, &rhs, sizeof(XcbSurfaceCreateInfoKHR) );
8570 }
8571
8572 XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs )
8573 {
8574 memcpy( this, &rhs, sizeof(XcbSurfaceCreateInfoKHR) );
8575 return *this;
8576 }
8577
8578 XcbSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8579 {
8580 sType = sType_;
8581 return *this;
8582 }
8583
8584 XcbSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8585 {
8586 pNext = pNext_;
8587 return *this;
8588 }
8589
8590 XcbSurfaceCreateInfoKHR& setFlags( XcbSurfaceCreateFlagsKHR flags_ )
8591 {
8592 flags = flags_;
8593 return *this;
8594 }
8595
8596 XcbSurfaceCreateInfoKHR& setConnection( xcb_connection_t* connection_ )
8597 {
8598 connection = connection_;
8599 return *this;
8600 }
8601
8602 XcbSurfaceCreateInfoKHR& setWindow( xcb_window_t window_ )
8603 {
8604 window = window_;
8605 return *this;
8606 }
8607
8608 operator const VkXcbSurfaceCreateInfoKHR&() const
8609 {
8610 return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>(this);
8611 }
8612
8613 bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const
8614 {
8615 return ( sType == rhs.sType )
8616 && ( pNext == rhs.pNext )
8617 && ( flags == rhs.flags )
8618 && ( connection == rhs.connection )
8619 && ( window == rhs.window );
8620 }
8621
8622 bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const
8623 {
8624 return !operator==( rhs );
8625 }
8626
8627 private:
8628 StructureType sType;
8629
8630 public:
8631 const void* pNext;
8632 XcbSurfaceCreateFlagsKHR flags;
8633 xcb_connection_t* connection;
8634 xcb_window_t window;
8635 };
8636 static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8637#endif /*VK_USE_PLATFORM_XCB_KHR*/
8638
8639 struct DebugMarkerMarkerInfoEXT
8640 {
8641 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
8642 : sType( StructureType::eDebugMarkerMarkerInfoEXT )
8643 , pNext( nullptr )
8644 , pMarkerName( pMarkerName_ )
8645 {
8646 memcpy( &color, color_.data(), 4 * sizeof( float ) );
8647 }
8648
8649 DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs )
8650 {
8651 memcpy( this, &rhs, sizeof(DebugMarkerMarkerInfoEXT) );
8652 }
8653
8654 DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs )
8655 {
8656 memcpy( this, &rhs, sizeof(DebugMarkerMarkerInfoEXT) );
8657 return *this;
8658 }
8659
8660 DebugMarkerMarkerInfoEXT& setSType( StructureType sType_ )
8661 {
8662 sType = sType_;
8663 return *this;
8664 }
8665
8666 DebugMarkerMarkerInfoEXT& setPNext( const void* pNext_ )
8667 {
8668 pNext = pNext_;
8669 return *this;
8670 }
8671
8672 DebugMarkerMarkerInfoEXT& setPMarkerName( const char* pMarkerName_ )
8673 {
8674 pMarkerName = pMarkerName_;
8675 return *this;
8676 }
8677
8678 DebugMarkerMarkerInfoEXT& setColor( std::array<float,4> color_ )
8679 {
8680 memcpy( &color, color_.data(), 4 * sizeof( float ) );
8681 return *this;
8682 }
8683
8684 operator const VkDebugMarkerMarkerInfoEXT&() const
8685 {
8686 return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>(this);
8687 }
8688
8689 bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const
8690 {
8691 return ( sType == rhs.sType )
8692 && ( pNext == rhs.pNext )
8693 && ( pMarkerName == rhs.pMarkerName )
8694 && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
8695 }
8696
8697 bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const
8698 {
8699 return !operator==( rhs );
8700 }
8701
8702 private:
8703 StructureType sType;
8704
8705 public:
8706 const void* pNext;
8707 const char* pMarkerName;
8708 float color[4];
8709 };
8710 static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
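  // Illustrative sketch, not part of the generated API: the color array is copied by value
  // through setColor(), so a temporary std::array can be passed directly.
  //
  //   vk::DebugMarkerMarkerInfoEXT markerInfo = vk::DebugMarkerMarkerInfoEXT()
  //     .setPMarkerName( "frame begin" )
  //     .setColor( {{ 1.0f, 0.0f, 0.0f, 1.0f }} );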
8711
8712 struct DedicatedAllocationImageCreateInfoNV
8713 {
8714 DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
8715 : sType( StructureType::eDedicatedAllocationImageCreateInfoNV )
8716 , pNext( nullptr )
8717 , dedicatedAllocation( dedicatedAllocation_ )
8718 {
8719 }
8720
8721 DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs )
8722 {
8723 memcpy( this, &rhs, sizeof(DedicatedAllocationImageCreateInfoNV) );
8724 }
8725
8726 DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs )
8727 {
8728 memcpy( this, &rhs, sizeof(DedicatedAllocationImageCreateInfoNV) );
8729 return *this;
8730 }
8731
8732 DedicatedAllocationImageCreateInfoNV& setSType( StructureType sType_ )
8733 {
8734 sType = sType_;
8735 return *this;
8736 }
8737
8738 DedicatedAllocationImageCreateInfoNV& setPNext( const void* pNext_ )
8739 {
8740 pNext = pNext_;
8741 return *this;
8742 }
8743
8744 DedicatedAllocationImageCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
8745 {
8746 dedicatedAllocation = dedicatedAllocation_;
8747 return *this;
8748 }
8749
8750 operator const VkDedicatedAllocationImageCreateInfoNV&() const
8751 {
8752 return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>(this);
8753 }
8754
8755 bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const
8756 {
8757 return ( sType == rhs.sType )
8758 && ( pNext == rhs.pNext )
8759 && ( dedicatedAllocation == rhs.dedicatedAllocation );
8760 }
8761
8762 bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const
8763 {
8764 return !operator==( rhs );
8765 }
8766
8767 private:
8768 StructureType sType;
8769
8770 public:
8771 const void* pNext;
8772 Bool32 dedicatedAllocation;
8773 };
8774 static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
8775
8776 struct DedicatedAllocationBufferCreateInfoNV
8777 {
8778 DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
8779 : sType( StructureType::eDedicatedAllocationBufferCreateInfoNV )
8780 , pNext( nullptr )
8781 , dedicatedAllocation( dedicatedAllocation_ )
8782 {
8783 }
8784
8785 DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
8786 {
8787 memcpy( this, &rhs, sizeof(DedicatedAllocationBufferCreateInfoNV) );
8788 }
8789
8790 DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
8791 {
8792 memcpy( this, &rhs, sizeof(DedicatedAllocationBufferCreateInfoNV) );
8793 return *this;
8794 }
8795
8796 DedicatedAllocationBufferCreateInfoNV& setSType( StructureType sType_ )
8797 {
8798 sType = sType_;
8799 return *this;
8800 }
8801
8802 DedicatedAllocationBufferCreateInfoNV& setPNext( const void* pNext_ )
8803 {
8804 pNext = pNext_;
8805 return *this;
8806 }
8807
8808 DedicatedAllocationBufferCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
8809 {
8810 dedicatedAllocation = dedicatedAllocation_;
8811 return *this;
8812 }
8813
8814 operator const VkDedicatedAllocationBufferCreateInfoNV&() const
8815 {
8816 return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>(this);
8817 }
8818
8819 bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
8820 {
8821 return ( sType == rhs.sType )
8822 && ( pNext == rhs.pNext )
8823 && ( dedicatedAllocation == rhs.dedicatedAllocation );
8824 }
8825
8826 bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
8827 {
8828 return !operator==( rhs );
8829 }
8830
8831 private:
8832 StructureType sType;
8833
8834 public:
8835 const void* pNext;
8836 Bool32 dedicatedAllocation;
8837 };
8838 static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
8839
8840 struct DedicatedAllocationMemoryAllocateInfoNV
8841 {
8842 DedicatedAllocationMemoryAllocateInfoNV( Image image_ = Image(), Buffer buffer_ = Buffer() )
8843 : sType( StructureType::eDedicatedAllocationMemoryAllocateInfoNV )
8844 , pNext( nullptr )
8845 , image( image_ )
8846 , buffer( buffer_ )
8847 {
8848 }
8849
8850 DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
8851 {
8852 memcpy( this, &rhs, sizeof(DedicatedAllocationMemoryAllocateInfoNV) );
8853 }
8854
8855 DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
8856 {
8857 memcpy( this, &rhs, sizeof(DedicatedAllocationMemoryAllocateInfoNV) );
8858 return *this;
8859 }
8860
8861 DedicatedAllocationMemoryAllocateInfoNV& setSType( StructureType sType_ )
8862 {
8863 sType = sType_;
8864 return *this;
8865 }
8866
8867 DedicatedAllocationMemoryAllocateInfoNV& setPNext( const void* pNext_ )
8868 {
8869 pNext = pNext_;
8870 return *this;
8871 }
8872
8873 DedicatedAllocationMemoryAllocateInfoNV& setImage( Image image_ )
8874 {
8875 image = image_;
8876 return *this;
8877 }
8878
8879 DedicatedAllocationMemoryAllocateInfoNV& setBuffer( Buffer buffer_ )
8880 {
8881 buffer = buffer_;
8882 return *this;
8883 }
8884
8885 operator const VkDedicatedAllocationMemoryAllocateInfoNV&() const
8886 {
8887 return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>(this);
8888 }
8889
8890 bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
8891 {
8892 return ( sType == rhs.sType )
8893 && ( pNext == rhs.pNext )
8894 && ( image == rhs.image )
8895 && ( buffer == rhs.buffer );
8896 }
8897
8898 bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
8899 {
8900 return !operator==( rhs );
8901 }
8902
8903 private:
8904 StructureType sType;
8905
8906 public:
8907 const void* pNext;
8908 Image image;
8909 Buffer buffer;
8910 };
8911 static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
8912
 8913#ifdef VK_USE_PLATFORM_WIN32_KHR
8914 struct ExportMemoryWin32HandleInfoNV
8915 {
8916 ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0 )
8917 : sType( StructureType::eExportMemoryWin32HandleInfoNV )
8918 , pNext( nullptr )
8919 , pAttributes( pAttributes_ )
8920 , dwAccess( dwAccess_ )
8921 {
8922 }
8923
8924 ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs )
8925 {
8926 memcpy( this, &rhs, sizeof(ExportMemoryWin32HandleInfoNV) );
8927 }
8928
8929 ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs )
8930 {
8931 memcpy( this, &rhs, sizeof(ExportMemoryWin32HandleInfoNV) );
8932 return *this;
8933 }
8934
8935 ExportMemoryWin32HandleInfoNV& setSType( StructureType sType_ )
8936 {
8937 sType = sType_;
8938 return *this;
8939 }
8940
8941 ExportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
8942 {
8943 pNext = pNext_;
8944 return *this;
8945 }
8946
8947 ExportMemoryWin32HandleInfoNV& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
8948 {
8949 pAttributes = pAttributes_;
8950 return *this;
8951 }
8952
8953 ExportMemoryWin32HandleInfoNV& setDwAccess( DWORD dwAccess_ )
8954 {
8955 dwAccess = dwAccess_;
8956 return *this;
8957 }
8958
8959 operator const VkExportMemoryWin32HandleInfoNV&() const
8960 {
8961 return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>(this);
8962 }
8963
8964 bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const
8965 {
8966 return ( sType == rhs.sType )
8967 && ( pNext == rhs.pNext )
8968 && ( pAttributes == rhs.pAttributes )
8969 && ( dwAccess == rhs.dwAccess );
8970 }
8971
8972 bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const
8973 {
8974 return !operator==( rhs );
8975 }
8976
8977 private:
8978 StructureType sType;
8979
8980 public:
8981 const void* pNext;
8982 const SECURITY_ATTRIBUTES* pAttributes;
8983 DWORD dwAccess;
8984 };
8985 static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
8986#endif /*VK_USE_PLATFORM_WIN32_KHR*/
8987
8988#ifdef VK_USE_PLATFORM_WIN32_KHR
8989 struct Win32KeyedMutexAcquireReleaseInfoNV
8990 {
8991 Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr )
8992 : sType( StructureType::eWin32KeyedMutexAcquireReleaseInfoNV )
8993 , pNext( nullptr )
8994 , acquireCount( acquireCount_ )
8995 , pAcquireSyncs( pAcquireSyncs_ )
8996 , pAcquireKeys( pAcquireKeys_ )
8997 , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
8998 , releaseCount( releaseCount_ )
8999 , pReleaseSyncs( pReleaseSyncs_ )
9000 , pReleaseKeys( pReleaseKeys_ )
9001 {
9002 }
9003
9004 Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
9005 {
9006 memcpy( this, &rhs, sizeof(Win32KeyedMutexAcquireReleaseInfoNV) );
9007 }
9008
9009 Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
9010 {
9011 memcpy( this, &rhs, sizeof(Win32KeyedMutexAcquireReleaseInfoNV) );
9012 return *this;
9013 }
9014
9015 Win32KeyedMutexAcquireReleaseInfoNV& setSType( StructureType sType_ )
9016 {
9017 sType = sType_;
9018 return *this;
9019 }
9020
9021 Win32KeyedMutexAcquireReleaseInfoNV& setPNext( const void* pNext_ )
9022 {
9023 pNext = pNext_;
9024 return *this;
9025 }
9026
9027 Win32KeyedMutexAcquireReleaseInfoNV& setAcquireCount( uint32_t acquireCount_ )
9028 {
9029 acquireCount = acquireCount_;
9030 return *this;
9031 }
9032
9033 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
9034 {
9035 pAcquireSyncs = pAcquireSyncs_;
9036 return *this;
9037 }
9038
9039 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
9040 {
9041 pAcquireKeys = pAcquireKeys_;
9042 return *this;
9043 }
9044
9045 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ )
9046 {
9047 pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
9048 return *this;
9049 }
9050
9051 Win32KeyedMutexAcquireReleaseInfoNV& setReleaseCount( uint32_t releaseCount_ )
9052 {
9053 releaseCount = releaseCount_;
9054 return *this;
9055 }
9056
9057 Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
9058 {
9059 pReleaseSyncs = pReleaseSyncs_;
9060 return *this;
9061 }
9062
9063 Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
9064 {
9065 pReleaseKeys = pReleaseKeys_;
9066 return *this;
9067 }
9068
9069 operator const VkWin32KeyedMutexAcquireReleaseInfoNV&() const
9070 {
9071 return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>(this);
9072 }
9073
9074 bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
9075 {
9076 return ( sType == rhs.sType )
9077 && ( pNext == rhs.pNext )
9078 && ( acquireCount == rhs.acquireCount )
9079 && ( pAcquireSyncs == rhs.pAcquireSyncs )
9080 && ( pAcquireKeys == rhs.pAcquireKeys )
9081 && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
9082 && ( releaseCount == rhs.releaseCount )
9083 && ( pReleaseSyncs == rhs.pReleaseSyncs )
9084 && ( pReleaseKeys == rhs.pReleaseKeys );
9085 }
9086
9087 bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
9088 {
9089 return !operator==( rhs );
9090 }
9091
9092 private:
9093 StructureType sType;
9094
9095 public:
9096 const void* pNext;
9097 uint32_t acquireCount;
9098 const DeviceMemory* pAcquireSyncs;
9099 const uint64_t* pAcquireKeys;
9100 const uint32_t* pAcquireTimeoutMilliseconds;
9101 uint32_t releaseCount;
9102 const DeviceMemory* pReleaseSyncs;
9103 const uint64_t* pReleaseKeys;
9104 };
9105 static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
9106#endif /*VK_USE_PLATFORM_WIN32_KHR*/
9107
 9108 struct DeviceGeneratedCommandsFeaturesNVX
9109 {
9110 DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 )
9111 : sType( StructureType::eDeviceGeneratedCommandsFeaturesNVX )
9112 , pNext( nullptr )
9113 , computeBindingPointSupport( computeBindingPointSupport_ )
9114 {
9115 }
9116
9117 DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
9118 {
9119 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
9120 }
9121
9122 DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
9123 {
9124 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
9125 return *this;
9126 }
9127
9128 DeviceGeneratedCommandsFeaturesNVX& setSType( StructureType sType_ )
9129 {
9130 sType = sType_;
9131 return *this;
9132 }
9133
9134 DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ )
9135 {
9136 pNext = pNext_;
9137 return *this;
9138 }
9139
9140 DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ )
9141 {
9142 computeBindingPointSupport = computeBindingPointSupport_;
9143 return *this;
9144 }
9145
9146 operator const VkDeviceGeneratedCommandsFeaturesNVX&() const
9147 {
9148 return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>(this);
9149 }
9150
9151 bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
9152 {
9153 return ( sType == rhs.sType )
9154 && ( pNext == rhs.pNext )
9155 && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
9156 }
9157
9158 bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
9159 {
9160 return !operator==( rhs );
9161 }
9162
9163 private:
9164 StructureType sType;
9165
9166 public:
9167 const void* pNext;
9168 Bool32 computeBindingPointSupport;
9169 };
9170 static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
9171
9172 struct DeviceGeneratedCommandsLimitsNVX
9173 {
9174 DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, uint32_t maxObjectEntryCounts_ = 0, uint32_t minSequenceCountBufferOffsetAlignment_ = 0, uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 )
9175 : sType( StructureType::eDeviceGeneratedCommandsLimitsNVX )
9176 , pNext( nullptr )
9177 , maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
9178 , maxObjectEntryCounts( maxObjectEntryCounts_ )
9179 , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
9180 , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
9181 , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
9182 {
9183 }
9184
9185 DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
9186 {
9187 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
9188 }
9189
9190 DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
9191 {
9192 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
9193 return *this;
9194 }
9195
9196 DeviceGeneratedCommandsLimitsNVX& setSType( StructureType sType_ )
9197 {
9198 sType = sType_;
9199 return *this;
9200 }
9201
9202 DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ )
9203 {
9204 pNext = pNext_;
9205 return *this;
9206 }
9207
9208 DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ )
9209 {
9210 maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
9211 return *this;
9212 }
9213
9214 DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ )
9215 {
9216 maxObjectEntryCounts = maxObjectEntryCounts_;
9217 return *this;
9218 }
9219
9220 DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ )
9221 {
9222 minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
9223 return *this;
9224 }
9225
9226 DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ )
9227 {
9228 minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
9229 return *this;
9230 }
9231
9232 DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ )
9233 {
9234 minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
9235 return *this;
9236 }
9237
9238 operator const VkDeviceGeneratedCommandsLimitsNVX&() const
9239 {
9240 return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>(this);
9241 }
9242
9243 bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
9244 {
9245 return ( sType == rhs.sType )
9246 && ( pNext == rhs.pNext )
9247 && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
9248 && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
9249 && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
9250 && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
9251 && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
9252 }
9253
9254 bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
9255 {
9256 return !operator==( rhs );
9257 }
9258
9259 private:
9260 StructureType sType;
9261
9262 public:
9263 const void* pNext;
9264 uint32_t maxIndirectCommandsLayoutTokenCount;
9265 uint32_t maxObjectEntryCounts;
9266 uint32_t minSequenceCountBufferOffsetAlignment;
9267 uint32_t minSequenceIndexBufferOffsetAlignment;
9268 uint32_t minCommandsTokenBufferOffsetAlignment;
9269 };
9270 static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
9271
9272 struct CmdReserveSpaceForCommandsInfoNVX
9273 {
9274 CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t maxSequencesCount_ = 0 )
9275 : sType( StructureType::eCmdReserveSpaceForCommandsInfoNVX )
9276 , pNext( nullptr )
9277 , objectTable( objectTable_ )
9278 , indirectCommandsLayout( indirectCommandsLayout_ )
9279 , maxSequencesCount( maxSequencesCount_ )
9280 {
9281 }
9282
9283 CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
9284 {
9285 memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
9286 }
9287
9288 CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
9289 {
9290 memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
9291 return *this;
9292 }
9293
9294 CmdReserveSpaceForCommandsInfoNVX& setSType( StructureType sType_ )
9295 {
9296 sType = sType_;
9297 return *this;
9298 }
9299
9300 CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ )
9301 {
9302 pNext = pNext_;
9303 return *this;
9304 }
9305
9306 CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
9307 {
9308 objectTable = objectTable_;
9309 return *this;
9310 }
9311
9312 CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
9313 {
9314 indirectCommandsLayout = indirectCommandsLayout_;
9315 return *this;
9316 }
9317
9318 CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
9319 {
9320 maxSequencesCount = maxSequencesCount_;
9321 return *this;
9322 }
9323
9324 operator const VkCmdReserveSpaceForCommandsInfoNVX&() const
9325 {
9326 return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>(this);
9327 }
9328
9329 bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
9330 {
9331 return ( sType == rhs.sType )
9332 && ( pNext == rhs.pNext )
9333 && ( objectTable == rhs.objectTable )
9334 && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
9335 && ( maxSequencesCount == rhs.maxSequencesCount );
9336 }
9337
9338 bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
9339 {
9340 return !operator==( rhs );
9341 }
9342
9343 private:
9344 StructureType sType;
9345
9346 public:
9347 const void* pNext;
9348 ObjectTableNVX objectTable;
9349 IndirectCommandsLayoutNVX indirectCommandsLayout;
9350 uint32_t maxSequencesCount;
9351 };
9352 static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
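  // Usage sketch (illustrative comment, not part of the generated header): building the
  // reservation info with the chained setters above. The `objectTable` and
  // `indirectCommandsLayout` handles are assumed to have been created elsewhere via the
  // VK_NVX_device_generated_commands extension.
  //
  //   vk::CmdReserveSpaceForCommandsInfoNVX reserveInfo = vk::CmdReserveSpaceForCommandsInfoNVX()
  //     .setObjectTable( objectTable )
  //     .setIndirectCommandsLayout( indirectCommandsLayout )
  //     .setMaxSequencesCount( 64 );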
9353
9354 enum class SubpassContents
9355 {
9356 eInline = VK_SUBPASS_CONTENTS_INLINE,
9357 eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
9358 };
9359
9360 struct PresentInfoKHR
9361 {
9362 PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
9363 : sType( StructureType::ePresentInfoKHR )
9364 , pNext( nullptr )
9365 , waitSemaphoreCount( waitSemaphoreCount_ )
9366 , pWaitSemaphores( pWaitSemaphores_ )
9367 , swapchainCount( swapchainCount_ )
9368 , pSwapchains( pSwapchains_ )
9369 , pImageIndices( pImageIndices_ )
9370 , pResults( pResults_ )
9371 {
9372 }
9373
9374 PresentInfoKHR( VkPresentInfoKHR const & rhs )
9375 {
9376 memcpy( this, &rhs, sizeof(PresentInfoKHR) );
9377 }
9378
9379 PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
9380 {
9381 memcpy( this, &rhs, sizeof(PresentInfoKHR) );
9382 return *this;
9383 }
9384
9385 PresentInfoKHR& setSType( StructureType sType_ )
9386 {
9387 sType = sType_;
9388 return *this;
9389 }
9390
9391 PresentInfoKHR& setPNext( const void* pNext_ )
9392 {
9393 pNext = pNext_;
9394 return *this;
9395 }
9396
9397 PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
9398 {
9399 waitSemaphoreCount = waitSemaphoreCount_;
9400 return *this;
9401 }
9402
9403 PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
9404 {
9405 pWaitSemaphores = pWaitSemaphores_;
9406 return *this;
9407 }
9408
9409 PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
9410 {
9411 swapchainCount = swapchainCount_;
9412 return *this;
9413 }
9414
9415 PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
9416 {
9417 pSwapchains = pSwapchains_;
9418 return *this;
9419 }
9420
9421 PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
9422 {
9423 pImageIndices = pImageIndices_;
9424 return *this;
9425 }
9426
9427 PresentInfoKHR& setPResults( Result* pResults_ )
9428 {
9429 pResults = pResults_;
9430 return *this;
9431 }
9432
9433 operator const VkPresentInfoKHR&() const
9434 {
9435 return *reinterpret_cast<const VkPresentInfoKHR*>(this);
9436 }
9437
9438 bool operator==( PresentInfoKHR const& rhs ) const
9439 {
9440 return ( sType == rhs.sType )
9441 && ( pNext == rhs.pNext )
9442 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
9443 && ( pWaitSemaphores == rhs.pWaitSemaphores )
9444 && ( swapchainCount == rhs.swapchainCount )
9445 && ( pSwapchains == rhs.pSwapchains )
9446 && ( pImageIndices == rhs.pImageIndices )
9447 && ( pResults == rhs.pResults );
9448 }
9449
9450 bool operator!=( PresentInfoKHR const& rhs ) const
9451 {
9452 return !operator==( rhs );
9453 }
9454
9455 private:
9456 StructureType sType;
9457
9458 public:
9459 const void* pNext;
9460 uint32_t waitSemaphoreCount;
9461 const Semaphore* pWaitSemaphores;
9462 uint32_t swapchainCount;
9463 const SwapchainKHR* pSwapchains;
9464 const uint32_t* pImageIndices;
9465 Result* pResults;
9466 };
9467 static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
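  // Usage sketch (illustrative comment, not part of the generated header): presenting one
  // swapchain image. `presentQueue`, `renderFinished`, `swapchain` and `imageIndex` are
  // assumed to have been created/acquired elsewhere; Queue::presentKHR is the wrapper around
  // vkQueuePresentKHR and returns a vk::Result.
  //
  //   vk::PresentInfoKHR presentInfo = vk::PresentInfoKHR()
  //     .setWaitSemaphoreCount( 1 )
  //     .setPWaitSemaphores( &renderFinished )
  //     .setSwapchainCount( 1 )
  //     .setPSwapchains( &swapchain )
  //     .setPImageIndices( &imageIndex );
  //   vk::Result result = presentQueue.presentKHR( presentInfo );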
9468
9469 enum class DynamicState
9470 {
9471 eViewport = VK_DYNAMIC_STATE_VIEWPORT,
9472 eScissor = VK_DYNAMIC_STATE_SCISSOR,
9473 eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
9474 eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
9475 eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
9476 eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
9477 eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
9478 eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
9479 eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE
9480 };
9481
9482 struct PipelineDynamicStateCreateInfo
9483 {
9484 PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
9485 : sType( StructureType::ePipelineDynamicStateCreateInfo )
9486 , pNext( nullptr )
9487 , flags( flags_ )
9488 , dynamicStateCount( dynamicStateCount_ )
9489 , pDynamicStates( pDynamicStates_ )
9490 {
9491 }
9492
9493 PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
9494 {
9495 memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
9496 }
9497
9498 PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
9499 {
9500 memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
9501 return *this;
9502 }
9503
9504 PipelineDynamicStateCreateInfo& setSType( StructureType sType_ )
9505 {
9506 sType = sType_;
9507 return *this;
9508 }
9509
9510 PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
9511 {
9512 pNext = pNext_;
9513 return *this;
9514 }
9515
9516 PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
9517 {
9518 flags = flags_;
9519 return *this;
9520 }
9521
9522 PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
9523 {
9524 dynamicStateCount = dynamicStateCount_;
9525 return *this;
9526 }
9527
9528 PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
9529 {
9530 pDynamicStates = pDynamicStates_;
9531 return *this;
9532 }
9533
9534 operator const VkPipelineDynamicStateCreateInfo&() const
9535 {
9536 return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
9537 }
9538
9539 bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
9540 {
9541 return ( sType == rhs.sType )
9542 && ( pNext == rhs.pNext )
9543 && ( flags == rhs.flags )
9544 && ( dynamicStateCount == rhs.dynamicStateCount )
9545 && ( pDynamicStates == rhs.pDynamicStates );
9546 }
9547
9548 bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
9549 {
9550 return !operator==( rhs );
9551 }
9552
9553 private:
9554 StructureType sType;
9555
9556 public:
9557 const void* pNext;
9558 PipelineDynamicStateCreateFlags flags;
9559 uint32_t dynamicStateCount;
9560 const DynamicState* pDynamicStates;
9561 };
9562 static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
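  // Usage sketch (illustrative comment, not part of the generated header): marking viewport
  // and scissor as dynamic so they are set at command-recording time rather than baked into
  // the pipeline.
  //
  //   vk::DynamicState dynamicStates[] = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
  //   vk::PipelineDynamicStateCreateInfo dynamicState = vk::PipelineDynamicStateCreateInfo()
  //     .setDynamicStateCount( 2 )
  //     .setPDynamicStates( dynamicStates );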
9563
9564 enum class QueueFlagBits
9565 {
9566 eGraphics = VK_QUEUE_GRAPHICS_BIT,
9567 eCompute = VK_QUEUE_COMPUTE_BIT,
9568 eTransfer = VK_QUEUE_TRANSFER_BIT,
9569 eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT
9570 };
9571
9572 using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
9573
9574 VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
9575 {
9576 return QueueFlags( bit0 ) | bit1;
9577 }
9578
9579 VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits )
9580 {
9581 return ~( QueueFlags( bits ) );
9582 }
9583
9584 template <> struct FlagTraits<QueueFlagBits>
9585 {
9586 enum
9587 {
9588 allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding)
9589 };
9590 };
9591
9592 struct QueueFamilyProperties
9593 {
9594 operator const VkQueueFamilyProperties&() const
9595 {
9596 return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
9597 }
9598
9599 bool operator==( QueueFamilyProperties const& rhs ) const
9600 {
9601 return ( queueFlags == rhs.queueFlags )
9602 && ( queueCount == rhs.queueCount )
9603 && ( timestampValidBits == rhs.timestampValidBits )
9604 && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
9605 }
9606
9607 bool operator!=( QueueFamilyProperties const& rhs ) const
9608 {
9609 return !operator==( rhs );
9610 }
9611
9612 QueueFlags queueFlags;
9613 uint32_t queueCount;
9614 uint32_t timestampValidBits;
9615 Extent3D minImageTransferGranularity;
9616 };
9617 static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
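  // Usage sketch (illustrative comment, not part of the generated header): picking a queue
  // family that supports graphics work. `physicalDevice` is assumed to be a valid
  // vk::PhysicalDevice; getQueueFamilyProperties() is the enhanced-mode accessor returning a
  // std::vector, and the QueueFlags operator| / operator& defined above make the test concise.
  //
  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  //   uint32_t graphicsFamily = UINT32_MAX;
  //   for ( uint32_t i = 0; i < families.size(); ++i )
  //   {
  //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       graphicsFamily = i;
  //       break;
  //     }
  //   }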
9618
9619 enum class MemoryPropertyFlagBits
9620 {
9621 eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
9622 eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
9623 eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
9624 eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
9625 eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT
9626 };
9627
9628 using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
9629
9630 VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
9631 {
9632 return MemoryPropertyFlags( bit0 ) | bit1;
9633 }
9634
9635 VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
9636 {
9637 return ~( MemoryPropertyFlags( bits ) );
9638 }
9639
9640 template <> struct FlagTraits<MemoryPropertyFlagBits>
9641 {
9642 enum
9643 {
9644 allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated)
9645 };
9646 };
9647
9648 struct MemoryType
9649 {
9650 operator const VkMemoryType&() const
9651 {
9652 return *reinterpret_cast<const VkMemoryType*>(this);
9653 }
9654
9655 bool operator==( MemoryType const& rhs ) const
9656 {
9657 return ( propertyFlags == rhs.propertyFlags )
9658 && ( heapIndex == rhs.heapIndex );
9659 }
9660
9661 bool operator!=( MemoryType const& rhs ) const
9662 {
9663 return !operator==( rhs );
9664 }
9665
9666 MemoryPropertyFlags propertyFlags;
9667 uint32_t heapIndex;
9668 };
9669 static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
9670
9671 enum class MemoryHeapFlagBits
9672 {
9673 eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT
9674 };
9675
9676 using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
9677
9678 VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
9679 {
9680 return MemoryHeapFlags( bit0 ) | bit1;
9681 }
9682
9683 VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits )
9684 {
9685 return ~( MemoryHeapFlags( bits ) );
9686 }
9687
9688 template <> struct FlagTraits<MemoryHeapFlagBits>
9689 {
9690 enum
9691 {
9692 allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal)
9693 };
9694 };
9695
9696 struct MemoryHeap
9697 {
9698 operator const VkMemoryHeap&() const
9699 {
9700 return *reinterpret_cast<const VkMemoryHeap*>(this);
9701 }
9702
9703 bool operator==( MemoryHeap const& rhs ) const
9704 {
9705 return ( size == rhs.size )
9706 && ( flags == rhs.flags );
9707 }
9708
9709 bool operator!=( MemoryHeap const& rhs ) const
9710 {
9711 return !operator==( rhs );
9712 }
9713
9714 DeviceSize size;
9715 MemoryHeapFlags flags;
9716 };
9717 static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
9718
9719 struct PhysicalDeviceMemoryProperties
9720 {
9721 operator const VkPhysicalDeviceMemoryProperties&() const
9722 {
9723 return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>(this);
9724 }
9725
9726 bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const
9727 {
9728 return ( memoryTypeCount == rhs.memoryTypeCount )
9729 && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( MemoryType ) ) == 0 )
9730 && ( memoryHeapCount == rhs.memoryHeapCount )
9731 && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( MemoryHeap ) ) == 0 );
9732 }
9733
9734 bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const
9735 {
9736 return !operator==( rhs );
9737 }
9738
9739 uint32_t memoryTypeCount;
9740 MemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
9741 uint32_t memoryHeapCount;
9742 MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
9743 };
9744 static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
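  // Usage sketch (illustrative comment, not part of the generated header): the classic search
  // over memoryTypes[] for an allocation, given the type bits from a memory-requirements query
  // and the property flags the allocation needs. `findMemoryType` is a hypothetical helper,
  // not part of this header.
  //
  //   uint32_t findMemoryType( vk::PhysicalDeviceMemoryProperties const& memProps,
  //                            uint32_t typeBits, vk::MemoryPropertyFlags required )
  //   {
  //     for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
  //     {
  //       // the i-th bit of typeBits says whether memory type i is allowed at all
  //       if ( ( typeBits & ( 1u << i ) ) &&
  //            ( ( memProps.memoryTypes[i].propertyFlags & required ) == required ) )
  //       {
  //         return i;
  //       }
  //     }
  //     return UINT32_MAX;  // no suitable memory type found
  //   }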
9745
9746 enum class AccessFlagBits
9747 {
9748 eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
9749 eIndexRead = VK_ACCESS_INDEX_READ_BIT,
9750 eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
9751 eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
9752 eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
9753 eShaderRead = VK_ACCESS_SHADER_READ_BIT,
9754 eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
9755 eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
9756 eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
9757 eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
9758 eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
9759 eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
9760 eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
9761 eHostRead = VK_ACCESS_HOST_READ_BIT,
9762 eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
9763 eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
9764 eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
9765 eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
9766 eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX
9767 };
9768
9769 using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
9770
9771 VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
9772 {
9773 return AccessFlags( bit0 ) | bit1;
9774 }
9775
9776 VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits )
9777 {
9778 return ~( AccessFlags( bits ) );
9779 }
9780
9781 template <> struct FlagTraits<AccessFlagBits>
9782 {
9783 enum
9784 {
9785 allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX)
9786 };
9787 };
9788
9789 struct MemoryBarrier
9790 {
9791 MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags() )
9792 : sType( StructureType::eMemoryBarrier )
9793 , pNext( nullptr )
9794 , srcAccessMask( srcAccessMask_ )
9795 , dstAccessMask( dstAccessMask_ )
9796 {
9797 }
9798
9799 MemoryBarrier( VkMemoryBarrier const & rhs )
9800 {
9801 memcpy( this, &rhs, sizeof(MemoryBarrier) );
9802 }
9803
9804 MemoryBarrier& operator=( VkMemoryBarrier const & rhs )
9805 {
9806 memcpy( this, &rhs, sizeof(MemoryBarrier) );
9807 return *this;
9808 }
9809
9810 MemoryBarrier& setSType( StructureType sType_ )
9811 {
9812 sType = sType_;
9813 return *this;
9814 }
9815
9816 MemoryBarrier& setPNext( const void* pNext_ )
9817 {
9818 pNext = pNext_;
9819 return *this;
9820 }
9821
9822 MemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
9823 {
9824 srcAccessMask = srcAccessMask_;
9825 return *this;
9826 }
9827
9828 MemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
9829 {
9830 dstAccessMask = dstAccessMask_;
9831 return *this;
9832 }
9833
9834 operator const VkMemoryBarrier&() const
9835 {
9836 return *reinterpret_cast<const VkMemoryBarrier*>(this);
9837 }
9838
9839 bool operator==( MemoryBarrier const& rhs ) const
9840 {
9841 return ( sType == rhs.sType )
9842 && ( pNext == rhs.pNext )
9843 && ( srcAccessMask == rhs.srcAccessMask )
9844 && ( dstAccessMask == rhs.dstAccessMask );
9845 }
9846
9847 bool operator!=( MemoryBarrier const& rhs ) const
9848 {
9849 return !operator==( rhs );
9850 }
9851
9852 private:
9853 StructureType sType;
9854
9855 public:
9856 const void* pNext;
9857 AccessFlags srcAccessMask;
9858 AccessFlags dstAccessMask;
9859 };
9860 static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
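  // Usage sketch (illustrative comment, not part of the generated header): a global memory
  // barrier making transfer writes visible to subsequent shader and uniform reads; the access
  // masks combine via the AccessFlags operator| defined above.
  //
  //   vk::MemoryBarrier globalBarrier = vk::MemoryBarrier()
  //     .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
  //     .setDstAccessMask( vk::AccessFlagBits::eShaderRead | vk::AccessFlagBits::eUniformRead );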
9861
9862 struct BufferMemoryBarrier
9863 {
9864 BufferMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
9865 : sType( StructureType::eBufferMemoryBarrier )
9866 , pNext( nullptr )
9867 , srcAccessMask( srcAccessMask_ )
9868 , dstAccessMask( dstAccessMask_ )
9869 , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
9870 , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
9871 , buffer( buffer_ )
9872 , offset( offset_ )
9873 , size( size_ )
9874 {
9875 }
9876
9877 BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs )
9878 {
9879 memcpy( this, &rhs, sizeof(BufferMemoryBarrier) );
9880 }
9881
9882 BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs )
9883 {
9884 memcpy( this, &rhs, sizeof(BufferMemoryBarrier) );
9885 return *this;
9886 }
9887
9888 BufferMemoryBarrier& setSType( StructureType sType_ )
9889 {
9890 sType = sType_;
9891 return *this;
9892 }
9893
9894 BufferMemoryBarrier& setPNext( const void* pNext_ )
9895 {
9896 pNext = pNext_;
9897 return *this;
9898 }
9899
9900 BufferMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
9901 {
9902 srcAccessMask = srcAccessMask_;
9903 return *this;
9904 }
9905
9906 BufferMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
9907 {
9908 dstAccessMask = dstAccessMask_;
9909 return *this;
9910 }
9911
9912 BufferMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
9913 {
9914 srcQueueFamilyIndex = srcQueueFamilyIndex_;
9915 return *this;
9916 }
9917
9918 BufferMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
9919 {
9920 dstQueueFamilyIndex = dstQueueFamilyIndex_;
9921 return *this;
9922 }
9923
9924 BufferMemoryBarrier& setBuffer( Buffer buffer_ )
9925 {
9926 buffer = buffer_;
9927 return *this;
9928 }
9929
9930 BufferMemoryBarrier& setOffset( DeviceSize offset_ )
9931 {
9932 offset = offset_;
9933 return *this;
9934 }
9935
9936 BufferMemoryBarrier& setSize( DeviceSize size_ )
9937 {
9938 size = size_;
9939 return *this;
9940 }
9941
9942 operator const VkBufferMemoryBarrier&() const
9943 {
9944 return *reinterpret_cast<const VkBufferMemoryBarrier*>(this);
9945 }
9946
9947 bool operator==( BufferMemoryBarrier const& rhs ) const
9948 {
9949 return ( sType == rhs.sType )
9950 && ( pNext == rhs.pNext )
9951 && ( srcAccessMask == rhs.srcAccessMask )
9952 && ( dstAccessMask == rhs.dstAccessMask )
9953 && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
9954 && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
9955 && ( buffer == rhs.buffer )
9956 && ( offset == rhs.offset )
9957 && ( size == rhs.size );
9958 }
9959
9960 bool operator!=( BufferMemoryBarrier const& rhs ) const
9961 {
9962 return !operator==( rhs );
9963 }
9964
9965 private:
9966 StructureType sType;
9967
9968 public:
9969 const void* pNext;
9970 AccessFlags srcAccessMask;
9971 AccessFlags dstAccessMask;
9972 uint32_t srcQueueFamilyIndex;
9973 uint32_t dstQueueFamilyIndex;
9974 Buffer buffer;
9975 DeviceSize offset;
9976 DeviceSize size;
9977 };
9978 static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
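  // Usage sketch (illustrative comment, not part of the generated header): a buffer barrier
  // between a transfer write and a fragment-shader read, recorded with the enhanced-mode
  // CommandBuffer::pipelineBarrier overload; `cmd` and `buffer` are assumed to exist elsewhere.
  //
  //   vk::BufferMemoryBarrier bufferBarrier = vk::BufferMemoryBarrier()
  //     .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
  //     .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
  //     .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //     .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //     .setBuffer( buffer )
  //     .setOffset( 0 )
  //     .setSize( VK_WHOLE_SIZE );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
  //                        vk::DependencyFlags(), nullptr, bufferBarrier, nullptr );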
9979
9980 enum class BufferUsageFlagBits
9981 {
9982 eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
9983 eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
9984 eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
9985 eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
9986 eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
9987 eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
9988 eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
9989 eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
9990 eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT
9991 };
9992
9993 using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
9994
9995 VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
9996 {
9997 return BufferUsageFlags( bit0 ) | bit1;
9998 }
9999
10000 VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits )
10001 {
10002 return ~( BufferUsageFlags( bits ) );
10003 }
10004
10005 template <> struct FlagTraits<BufferUsageFlagBits>
10006 {
10007 enum
10008 {
10009 allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer)
10010 };
10011 };
10012
10013 enum class BufferCreateFlagBits
10014 {
10015 eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
10016 eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
10017 eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT
10018 };
10019
10020 using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
10021
10022 VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
10023 {
10024 return BufferCreateFlags( bit0 ) | bit1;
10025 }
10026
10027 VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits )
10028 {
10029 return ~( BufferCreateFlags( bits ) );
10030 }
10031
10032 template <> struct FlagTraits<BufferCreateFlagBits>
10033 {
10034 enum
10035 {
10036 allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased)
10037 };
10038 };
10039
10040 struct BufferCreateInfo
10041 {
10042 BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), DeviceSize size_ = 0, BufferUsageFlags usage_ = BufferUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr )
10043 : sType( StructureType::eBufferCreateInfo )
10044 , pNext( nullptr )
10045 , flags( flags_ )
10046 , size( size_ )
10047 , usage( usage_ )
10048 , sharingMode( sharingMode_ )
10049 , queueFamilyIndexCount( queueFamilyIndexCount_ )
10050 , pQueueFamilyIndices( pQueueFamilyIndices_ )
10051 {
10052 }
10053
10054 BufferCreateInfo( VkBufferCreateInfo const & rhs )
10055 {
10056 memcpy( this, &rhs, sizeof(BufferCreateInfo) );
10057 }
10058
10059 BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs )
10060 {
10061 memcpy( this, &rhs, sizeof(BufferCreateInfo) );
10062 return *this;
10063 }
10064
10065 BufferCreateInfo& setSType( StructureType sType_ )
10066 {
10067 sType = sType_;
10068 return *this;
10069 }
10070
10071 BufferCreateInfo& setPNext( const void* pNext_ )
10072 {
10073 pNext = pNext_;
10074 return *this;
10075 }
10076
10077 BufferCreateInfo& setFlags( BufferCreateFlags flags_ )
10078 {
10079 flags = flags_;
10080 return *this;
10081 }
10082
10083 BufferCreateInfo& setSize( DeviceSize size_ )
10084 {
10085 size = size_;
10086 return *this;
10087 }
10088
10089 BufferCreateInfo& setUsage( BufferUsageFlags usage_ )
10090 {
10091 usage = usage_;
10092 return *this;
10093 }
10094
10095 BufferCreateInfo& setSharingMode( SharingMode sharingMode_ )
10096 {
10097 sharingMode = sharingMode_;
10098 return *this;
10099 }
10100
10101 BufferCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
10102 {
10103 queueFamilyIndexCount = queueFamilyIndexCount_;
10104 return *this;
10105 }
10106
10107 BufferCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
10108 {
10109 pQueueFamilyIndices = pQueueFamilyIndices_;
10110 return *this;
10111 }
10112
10113 operator const VkBufferCreateInfo&() const
10114 {
10115 return *reinterpret_cast<const VkBufferCreateInfo*>(this);
10116 }
10117
10118 bool operator==( BufferCreateInfo const& rhs ) const
10119 {
10120 return ( sType == rhs.sType )
10121 && ( pNext == rhs.pNext )
10122 && ( flags == rhs.flags )
10123 && ( size == rhs.size )
10124 && ( usage == rhs.usage )
10125 && ( sharingMode == rhs.sharingMode )
10126 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
10127 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
10128 }
10129
10130 bool operator!=( BufferCreateInfo const& rhs ) const
10131 {
10132 return !operator==( rhs );
10133 }
10134
10135 private:
10136 StructureType sType;
10137
10138 public:
10139 const void* pNext;
10140 BufferCreateFlags flags;
10141 DeviceSize size;
10142 BufferUsageFlags usage;
10143 SharingMode sharingMode;
10144 uint32_t queueFamilyIndexCount;
10145 const uint32_t* pQueueFamilyIndices;
10146 };
10147 static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
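  // Usage sketch (illustrative comment, not part of the generated header): creating an
  // exclusive vertex buffer that can also serve as a transfer destination; `device` is
  // assumed to be a valid vk::Device and createBuffer its enhanced-mode wrapper around
  // vkCreateBuffer.
  //
  //   vk::BufferCreateInfo bufferInfo = vk::BufferCreateInfo()
  //     .setSize( 65536 )
  //     .setUsage( vk::BufferUsageFlagBits::eVertexBuffer | vk::BufferUsageFlagBits::eTransferDst )
  //     .setSharingMode( vk::SharingMode::eExclusive );
  //   vk::Buffer buffer = device.createBuffer( bufferInfo );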
10148
10149 enum class ShaderStageFlagBits
10150 {
10151 eVertex = VK_SHADER_STAGE_VERTEX_BIT,
10152 eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
10153 eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
10154 eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
10155 eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
10156 eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
10157 eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
10158 eAll = VK_SHADER_STAGE_ALL
10159 };
10160
10161 using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
10162
10163 VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
10164 {
10165 return ShaderStageFlags( bit0 ) | bit1;
10166 }
10167
10168 VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits )
10169 {
10170 return ~( ShaderStageFlags( bits ) );
10171 }
10172
10173 template <> struct FlagTraits<ShaderStageFlagBits>
10174 {
10175 enum
10176 {
10177 allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll)
10178 };
10179 };
10180
10181 struct DescriptorSetLayoutBinding
10182 {
10183 DescriptorSetLayoutBinding( uint32_t binding_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0, ShaderStageFlags stageFlags_ = ShaderStageFlags(), const Sampler* pImmutableSamplers_ = nullptr )
10184 : binding( binding_ )
10185 , descriptorType( descriptorType_ )
10186 , descriptorCount( descriptorCount_ )
10187 , stageFlags( stageFlags_ )
10188 , pImmutableSamplers( pImmutableSamplers_ )
10189 {
10190 }
10191
10192 DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs )
10193 {
10194 memcpy( this, &rhs, sizeof(DescriptorSetLayoutBinding) );
10195 }
10196
10197 DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs )
10198 {
10199 memcpy( this, &rhs, sizeof(DescriptorSetLayoutBinding) );
10200 return *this;
10201 }
10202
10203 DescriptorSetLayoutBinding& setBinding( uint32_t binding_ )
10204 {
10205 binding = binding_;
10206 return *this;
10207 }
10208
10209 DescriptorSetLayoutBinding& setDescriptorType( DescriptorType descriptorType_ )
10210 {
10211 descriptorType = descriptorType_;
10212 return *this;
10213 }
10214
10215 DescriptorSetLayoutBinding& setDescriptorCount( uint32_t descriptorCount_ )
10216 {
10217 descriptorCount = descriptorCount_;
10218 return *this;
10219 }
10220
10221 DescriptorSetLayoutBinding& setStageFlags( ShaderStageFlags stageFlags_ )
10222 {
10223 stageFlags = stageFlags_;
10224 return *this;
10225 }
10226
10227 DescriptorSetLayoutBinding& setPImmutableSamplers( const Sampler* pImmutableSamplers_ )
10228 {
10229 pImmutableSamplers = pImmutableSamplers_;
10230 return *this;
10231 }
10232
10233 operator const VkDescriptorSetLayoutBinding&() const
10234 {
10235 return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>(this);
10236 }
10237
10238 bool operator==( DescriptorSetLayoutBinding const& rhs ) const
10239 {
10240 return ( binding == rhs.binding )
10241 && ( descriptorType == rhs.descriptorType )
10242 && ( descriptorCount == rhs.descriptorCount )
10243 && ( stageFlags == rhs.stageFlags )
10244 && ( pImmutableSamplers == rhs.pImmutableSamplers );
10245 }
10246
10247 bool operator!=( DescriptorSetLayoutBinding const& rhs ) const
10248 {
10249 return !operator==( rhs );
10250 }
10251
10252 uint32_t binding;
10253 DescriptorType descriptorType;
10254 uint32_t descriptorCount;
10255 ShaderStageFlags stageFlags;
10256 const Sampler* pImmutableSamplers;
10257 };
10258 static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
10259
10260 struct DescriptorSetLayoutCreateInfo
10261 {
10262 DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = DescriptorSetLayoutCreateFlags(), uint32_t bindingCount_ = 0, const DescriptorSetLayoutBinding* pBindings_ = nullptr )
10263 : sType( StructureType::eDescriptorSetLayoutCreateInfo )
10264 , pNext( nullptr )
10265 , flags( flags_ )
10266 , bindingCount( bindingCount_ )
10267 , pBindings( pBindings_ )
10268 {
10269 }
10270
10271 DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs )
10272 {
10273 memcpy( this, &rhs, sizeof(DescriptorSetLayoutCreateInfo) );
10274 }
10275
10276 DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs )
10277 {
10278 memcpy( this, &rhs, sizeof(DescriptorSetLayoutCreateInfo) );
10279 return *this;
10280 }
10281
10282 DescriptorSetLayoutCreateInfo& setSType( StructureType sType_ )
10283 {
10284 sType = sType_;
10285 return *this;
10286 }
10287
10288 DescriptorSetLayoutCreateInfo& setPNext( const void* pNext_ )
10289 {
10290 pNext = pNext_;
10291 return *this;
10292 }
10293
10294 DescriptorSetLayoutCreateInfo& setFlags( DescriptorSetLayoutCreateFlags flags_ )
10295 {
10296 flags = flags_;
10297 return *this;
10298 }
10299
10300 DescriptorSetLayoutCreateInfo& setBindingCount( uint32_t bindingCount_ )
10301 {
10302 bindingCount = bindingCount_;
10303 return *this;
10304 }
10305
10306 DescriptorSetLayoutCreateInfo& setPBindings( const DescriptorSetLayoutBinding* pBindings_ )
10307 {
10308 pBindings = pBindings_;
10309 return *this;
10310 }
10311
10312 operator const VkDescriptorSetLayoutCreateInfo&() const
10313 {
10314 return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>(this);
10315 }
10316
10317 bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const
10318 {
10319 return ( sType == rhs.sType )
10320 && ( pNext == rhs.pNext )
10321 && ( flags == rhs.flags )
10322 && ( bindingCount == rhs.bindingCount )
10323 && ( pBindings == rhs.pBindings );
10324 }
10325
10326 bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const
10327 {
10328 return !operator==( rhs );
10329 }
10330
10331 private:
10332 StructureType sType;
10333
10334 public:
10335 const void* pNext;
10336 DescriptorSetLayoutCreateFlags flags;
10337 uint32_t bindingCount;
10338 const DescriptorSetLayoutBinding* pBindings;
10339 };
10340 static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
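  // Usage sketch (illustrative comment, not part of the generated header): a descriptor set
  // layout with a single uniform-buffer binding visible to the vertex stage; `device` is
  // assumed to be a valid vk::Device.
  //
  //   vk::DescriptorSetLayoutBinding uboBinding = vk::DescriptorSetLayoutBinding()
  //     .setBinding( 0 )
  //     .setDescriptorType( vk::DescriptorType::eUniformBuffer )
  //     .setDescriptorCount( 1 )
  //     .setStageFlags( vk::ShaderStageFlagBits::eVertex );
  //   vk::DescriptorSetLayoutCreateInfo setLayoutInfo = vk::DescriptorSetLayoutCreateInfo()
  //     .setBindingCount( 1 )
  //     .setPBindings( &uboBinding );
  //   vk::DescriptorSetLayout setLayout = device.createDescriptorSetLayout( setLayoutInfo );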
10341
10342 struct PipelineShaderStageCreateInfo
10343 {
10344 PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = PipelineShaderStageCreateFlags(), ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex, ShaderModule module_ = ShaderModule(), const char* pName_ = nullptr, const SpecializationInfo* pSpecializationInfo_ = nullptr )
10345 : sType( StructureType::ePipelineShaderStageCreateInfo )
10346 , pNext( nullptr )
10347 , flags( flags_ )
10348 , stage( stage_ )
10349 , module( module_ )
10350 , pName( pName_ )
10351 , pSpecializationInfo( pSpecializationInfo_ )
10352 {
10353 }
10354
10355 PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs )
10356 {
10357 memcpy( this, &rhs, sizeof(PipelineShaderStageCreateInfo) );
10358 }
10359
10360 PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs )
10361 {
10362 memcpy( this, &rhs, sizeof(PipelineShaderStageCreateInfo) );
10363 return *this;
10364 }
10365
10366 PipelineShaderStageCreateInfo& setSType( StructureType sType_ )
10367 {
10368 sType = sType_;
10369 return *this;
10370 }
10371
10372 PipelineShaderStageCreateInfo& setPNext( const void* pNext_ )
10373 {
10374 pNext = pNext_;
10375 return *this;
10376 }
10377
10378 PipelineShaderStageCreateInfo& setFlags( PipelineShaderStageCreateFlags flags_ )
10379 {
10380 flags = flags_;
10381 return *this;
10382 }
10383
10384 PipelineShaderStageCreateInfo& setStage( ShaderStageFlagBits stage_ )
10385 {
10386 stage = stage_;
10387 return *this;
10388 }
10389
10390 PipelineShaderStageCreateInfo& setModule( ShaderModule module_ )
10391 {
10392 module = module_;
10393 return *this;
10394 }
10395
10396 PipelineShaderStageCreateInfo& setPName( const char* pName_ )
10397 {
10398 pName = pName_;
10399 return *this;
10400 }
10401
10402 PipelineShaderStageCreateInfo& setPSpecializationInfo( const SpecializationInfo* pSpecializationInfo_ )
10403 {
10404 pSpecializationInfo = pSpecializationInfo_;
10405 return *this;
10406 }
10407
10408 operator const VkPipelineShaderStageCreateInfo&() const
10409 {
10410 return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>(this);
10411 }
10412
10413 bool operator==( PipelineShaderStageCreateInfo const& rhs ) const
10414 {
10415 return ( sType == rhs.sType )
10416 && ( pNext == rhs.pNext )
10417 && ( flags == rhs.flags )
10418 && ( stage == rhs.stage )
10419 && ( module == rhs.module )
10420 && ( pName == rhs.pName )
10421 && ( pSpecializationInfo == rhs.pSpecializationInfo );
10422 }
10423
10424 bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const
10425 {
10426 return !operator==( rhs );
10427 }
10428
10429 private:
10430 StructureType sType;
10431
10432 public:
10433 const void* pNext;
10434 PipelineShaderStageCreateFlags flags;
10435 ShaderStageFlagBits stage;
10436 ShaderModule module;
10437 const char* pName;
10438 const SpecializationInfo* pSpecializationInfo;
10439 };
10440 static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
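  // Usage sketch (illustrative comment, not part of the generated header): describing a
  // vertex shader stage whose entry point is "main"; `vertexModule` is a vk::ShaderModule
  // assumed to have been created from SPIR-V elsewhere.
  //
  //   vk::PipelineShaderStageCreateInfo vertexStage = vk::PipelineShaderStageCreateInfo()
  //     .setStage( vk::ShaderStageFlagBits::eVertex )
  //     .setModule( vertexModule )
  //     .setPName( "main" );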
10441
10442 struct PushConstantRange
10443 {
10444 PushConstantRange( ShaderStageFlags stageFlags_ = ShaderStageFlags(), uint32_t offset_ = 0, uint32_t size_ = 0 )
10445 : stageFlags( stageFlags_ )
10446 , offset( offset_ )
10447 , size( size_ )
10448 {
10449 }
10450
10451 PushConstantRange( VkPushConstantRange const & rhs )
10452 {
10453 memcpy( this, &rhs, sizeof(PushConstantRange) );
10454 }
10455
10456 PushConstantRange& operator=( VkPushConstantRange const & rhs )
10457 {
10458 memcpy( this, &rhs, sizeof(PushConstantRange) );
10459 return *this;
10460 }
10461
10462 PushConstantRange& setStageFlags( ShaderStageFlags stageFlags_ )
10463 {
10464 stageFlags = stageFlags_;
10465 return *this;
10466 }
10467
10468 PushConstantRange& setOffset( uint32_t offset_ )
10469 {
10470 offset = offset_;
10471 return *this;
10472 }
10473
10474 PushConstantRange& setSize( uint32_t size_ )
10475 {
10476 size = size_;
10477 return *this;
10478 }
10479
10480 operator const VkPushConstantRange&() const
10481 {
10482 return *reinterpret_cast<const VkPushConstantRange*>(this);
10483 }
10484
10485 bool operator==( PushConstantRange const& rhs ) const
10486 {
10487 return ( stageFlags == rhs.stageFlags )
10488 && ( offset == rhs.offset )
10489 && ( size == rhs.size );
10490 }
10491
10492 bool operator!=( PushConstantRange const& rhs ) const
10493 {
10494 return !operator==( rhs );
10495 }
10496
10497 ShaderStageFlags stageFlags;
10498 uint32_t offset;
10499 uint32_t size;
10500 };
10501 static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
10502
10503 struct PipelineLayoutCreateInfo
10504 {
10505 PipelineLayoutCreateInfo( PipelineLayoutCreateFlags flags_ = PipelineLayoutCreateFlags(), uint32_t setLayoutCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr, uint32_t pushConstantRangeCount_ = 0, const PushConstantRange* pPushConstantRanges_ = nullptr )
10506 : sType( StructureType::ePipelineLayoutCreateInfo )
10507 , pNext( nullptr )
10508 , flags( flags_ )
10509 , setLayoutCount( setLayoutCount_ )
10510 , pSetLayouts( pSetLayouts_ )
10511 , pushConstantRangeCount( pushConstantRangeCount_ )
10512 , pPushConstantRanges( pPushConstantRanges_ )
10513 {
10514 }
10515
10516 PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs )
10517 {
10518 memcpy( this, &rhs, sizeof(PipelineLayoutCreateInfo) );
10519 }
10520
10521 PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs )
10522 {
10523 memcpy( this, &rhs, sizeof(PipelineLayoutCreateInfo) );
10524 return *this;
10525 }
10526
10527 PipelineLayoutCreateInfo& setSType( StructureType sType_ )
10528 {
10529 sType = sType_;
10530 return *this;
10531 }
10532
10533 PipelineLayoutCreateInfo& setPNext( const void* pNext_ )
10534 {
10535 pNext = pNext_;
10536 return *this;
10537 }
10538
10539 PipelineLayoutCreateInfo& setFlags( PipelineLayoutCreateFlags flags_ )
10540 {
10541 flags = flags_;
10542 return *this;
10543 }
10544
10545 PipelineLayoutCreateInfo& setSetLayoutCount( uint32_t setLayoutCount_ )
10546 {
10547 setLayoutCount = setLayoutCount_;
10548 return *this;
10549 }
10550
10551 PipelineLayoutCreateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
10552 {
10553 pSetLayouts = pSetLayouts_;
10554 return *this;
10555 }
10556
10557 PipelineLayoutCreateInfo& setPushConstantRangeCount( uint32_t pushConstantRangeCount_ )
10558 {
10559 pushConstantRangeCount = pushConstantRangeCount_;
10560 return *this;
10561 }
10562
10563 PipelineLayoutCreateInfo& setPPushConstantRanges( const PushConstantRange* pPushConstantRanges_ )
10564 {
10565 pPushConstantRanges = pPushConstantRanges_;
10566 return *this;
10567 }
10568
10569 operator const VkPipelineLayoutCreateInfo&() const
10570 {
10571 return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>(this);
10572 }
10573
10574 bool operator==( PipelineLayoutCreateInfo const& rhs ) const
10575 {
10576 return ( sType == rhs.sType )
10577 && ( pNext == rhs.pNext )
10578 && ( flags == rhs.flags )
10579 && ( setLayoutCount == rhs.setLayoutCount )
10580 && ( pSetLayouts == rhs.pSetLayouts )
10581 && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
10582 && ( pPushConstantRanges == rhs.pPushConstantRanges );
10583 }
10584
10585 bool operator!=( PipelineLayoutCreateInfo const& rhs ) const
10586 {
10587 return !operator==( rhs );
10588 }
10589
10590 private:
10591 StructureType sType;
10592
10593 public:
10594 const void* pNext;
10595 PipelineLayoutCreateFlags flags;
10596 uint32_t setLayoutCount;
10597 const DescriptorSetLayout* pSetLayouts;
10598 uint32_t pushConstantRangeCount;
10599 const PushConstantRange* pPushConstantRanges;
10600 };
10601 static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
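  // Usage sketch (illustrative comment, not part of the generated header): a pipeline layout
  // combining one descriptor set layout with a small push-constant block for the vertex
  // stage; `device` and `setLayout` are assumed to exist elsewhere.
  //
  //   vk::PushConstantRange pushRange = vk::PushConstantRange()
  //     .setStageFlags( vk::ShaderStageFlagBits::eVertex )
  //     .setOffset( 0 )
  //     .setSize( 16 * sizeof( float ) );  // e.g. one 4x4 matrix
  //   vk::PipelineLayoutCreateInfo pipelineLayoutInfo = vk::PipelineLayoutCreateInfo()
  //     .setSetLayoutCount( 1 )
  //     .setPSetLayouts( &setLayout )
  //     .setPushConstantRangeCount( 1 )
  //     .setPPushConstantRanges( &pushRange );
  //   vk::PipelineLayout pipelineLayout = device.createPipelineLayout( pipelineLayoutInfo );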
10602
10603 enum class ImageUsageFlagBits
10604 {
10605 eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
10606 eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
10607 eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
10608 eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
10609 eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
10610 eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
10611 eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
10612 eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
10613 };
10614
10615 using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
10616
10617 VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
10618 {
10619 return ImageUsageFlags( bit0 ) | bit1;
10620 }
10621
10622 VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits )
10623 {
10624 return ~( ImageUsageFlags( bits ) );
10625 }
10626
10627 template <> struct FlagTraits<ImageUsageFlagBits>
10628 {
10629 enum
10630 {
10631 allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment)
10632 };
10633 };
10634
10635 enum class ImageCreateFlagBits
10636 {
10637 eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
10638 eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
10639 eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
10640 eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
10641 eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT
10642 };
10643
10644 using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
10645
10646 VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
10647 {
10648 return ImageCreateFlags( bit0 ) | bit1;
10649 }
10650
10651 VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits )
10652 {
10653 return ~( ImageCreateFlags( bits ) );
10654 }
10655
10656 template <> struct FlagTraits<ImageCreateFlagBits>
10657 {
10658 enum
10659 {
10660 allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible)
10661 };
10662 };
10663
10664 enum class PipelineCreateFlagBits
10665 {
10666 eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
10667 eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
10668 eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT
10669 };
10670
10671 using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
10672
10673 VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
10674 {
10675 return PipelineCreateFlags( bit0 ) | bit1;
10676 }
10677
10678 VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
10679 {
10680 return ~( PipelineCreateFlags( bits ) );
10681 }
10682
10683 template <> struct FlagTraits<PipelineCreateFlagBits>
10684 {
10685 enum
10686 {
10687 allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative)
10688 };
10689 };
10690
10691 struct ComputePipelineCreateInfo
10692 {
10693 ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), PipelineLayout layout_ = PipelineLayout(), Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
10694 : sType( StructureType::eComputePipelineCreateInfo )
10695 , pNext( nullptr )
10696 , flags( flags_ )
10697 , stage( stage_ )
10698 , layout( layout_ )
10699 , basePipelineHandle( basePipelineHandle_ )
10700 , basePipelineIndex( basePipelineIndex_ )
10701 {
10702 }
10703
10704 ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs )
10705 {
10706 memcpy( this, &rhs, sizeof(ComputePipelineCreateInfo) );
10707 }
10708
10709 ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs )
10710 {
10711 memcpy( this, &rhs, sizeof(ComputePipelineCreateInfo) );
10712 return *this;
10713 }
10714
10715 ComputePipelineCreateInfo& setSType( StructureType sType_ )
10716 {
10717 sType = sType_;
10718 return *this;
10719 }
10720
10721 ComputePipelineCreateInfo& setPNext( const void* pNext_ )
10722 {
10723 pNext = pNext_;
10724 return *this;
10725 }
10726
10727 ComputePipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
10728 {
10729 flags = flags_;
10730 return *this;
10731 }
10732
10733 ComputePipelineCreateInfo& setStage( PipelineShaderStageCreateInfo stage_ )
10734 {
10735 stage = stage_;
10736 return *this;
10737 }
10738
10739 ComputePipelineCreateInfo& setLayout( PipelineLayout layout_ )
10740 {
10741 layout = layout_;
10742 return *this;
10743 }
10744
10745 ComputePipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
10746 {
10747 basePipelineHandle = basePipelineHandle_;
10748 return *this;
10749 }
10750
10751 ComputePipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
10752 {
10753 basePipelineIndex = basePipelineIndex_;
10754 return *this;
10755 }
10756
10757 operator const VkComputePipelineCreateInfo&() const
10758 {
10759 return *reinterpret_cast<const VkComputePipelineCreateInfo*>(this);
10760 }
10761
10762 bool operator==( ComputePipelineCreateInfo const& rhs ) const
10763 {
10764 return ( sType == rhs.sType )
10765 && ( pNext == rhs.pNext )
10766 && ( flags == rhs.flags )
10767 && ( stage == rhs.stage )
10768 && ( layout == rhs.layout )
10769 && ( basePipelineHandle == rhs.basePipelineHandle )
10770 && ( basePipelineIndex == rhs.basePipelineIndex );
10771 }
10772
10773 bool operator!=( ComputePipelineCreateInfo const& rhs ) const
10774 {
10775 return !operator==( rhs );
10776 }
10777
10778 private:
10779 StructureType sType;
10780
10781 public:
10782 const void* pNext;
10783 PipelineCreateFlags flags;
10784 PipelineShaderStageCreateInfo stage;
10785 PipelineLayout layout;
10786 Pipeline basePipelineHandle;
10787 int32_t basePipelineIndex;
10788 };
10789 static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
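  // Usage sketch (illustrative comment, not part of the generated header): building a compute
  // pipeline from a compute-stage description and a pipeline layout. `device`, `computeStage`,
  // `pipelineLayout` and `pipelineCache` are assumed to exist elsewhere; the enhanced-mode
  // createComputePipelines wrapper returns a std::vector<vk::Pipeline>.
  //
  //   vk::ComputePipelineCreateInfo computeInfo = vk::ComputePipelineCreateInfo()
  //     .setStage( computeStage )
  //     .setLayout( pipelineLayout );
  //   vk::Pipeline computePipeline = device.createComputePipelines( pipelineCache, computeInfo ).front();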
10790
10791 enum class ColorComponentFlagBits
10792 {
10793 eR = VK_COLOR_COMPONENT_R_BIT,
10794 eG = VK_COLOR_COMPONENT_G_BIT,
10795 eB = VK_COLOR_COMPONENT_B_BIT,
10796 eA = VK_COLOR_COMPONENT_A_BIT
10797 };
10798
10799 using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
10800
10801 VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
10802 {
10803 return ColorComponentFlags( bit0 ) | bit1;
10804 }
10805
10806 VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits )
10807 {
10808 return ~( ColorComponentFlags( bits ) );
10809 }
10810
10811 template <> struct FlagTraits<ColorComponentFlagBits>
10812 {
10813 enum
10814 {
10815 allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
10816 };
10817 };
10818
10819 struct PipelineColorBlendAttachmentState
10820 {
10821 PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd, ColorComponentFlags colorWriteMask_ = ColorComponentFlags() )
10822 : blendEnable( blendEnable_ )
10823 , srcColorBlendFactor( srcColorBlendFactor_ )
10824 , dstColorBlendFactor( dstColorBlendFactor_ )
10825 , colorBlendOp( colorBlendOp_ )
10826 , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
10827 , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
10828 , alphaBlendOp( alphaBlendOp_ )
10829 , colorWriteMask( colorWriteMask_ )
10830 {
10831 }
10832
10833 PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs )
10834 {
10835 memcpy( this, &rhs, sizeof(PipelineColorBlendAttachmentState) );
10836 }
10837
10838 PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs )
10839 {
10840 memcpy( this, &rhs, sizeof(PipelineColorBlendAttachmentState) );
10841 return *this;
10842 }
10843
10844 PipelineColorBlendAttachmentState& setBlendEnable( Bool32 blendEnable_ )
10845 {
10846 blendEnable = blendEnable_;
10847 return *this;
10848 }
10849
10850 PipelineColorBlendAttachmentState& setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ )
10851 {
10852 srcColorBlendFactor = srcColorBlendFactor_;
10853 return *this;
10854 }
10855
10856 PipelineColorBlendAttachmentState& setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ )
10857 {
10858 dstColorBlendFactor = dstColorBlendFactor_;
10859 return *this;
10860 }
10861
10862 PipelineColorBlendAttachmentState& setColorBlendOp( BlendOp colorBlendOp_ )
10863 {
10864 colorBlendOp = colorBlendOp_;
10865 return *this;
10866 }
10867
10868 PipelineColorBlendAttachmentState& setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ )
10869 {
10870 srcAlphaBlendFactor = srcAlphaBlendFactor_;
10871 return *this;
10872 }
10873
10874 PipelineColorBlendAttachmentState& setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ )
10875 {
10876 dstAlphaBlendFactor = dstAlphaBlendFactor_;
10877 return *this;
10878 }
10879
10880 PipelineColorBlendAttachmentState& setAlphaBlendOp( BlendOp alphaBlendOp_ )
10881 {
10882 alphaBlendOp = alphaBlendOp_;
10883 return *this;
10884 }
10885
10886 PipelineColorBlendAttachmentState& setColorWriteMask( ColorComponentFlags colorWriteMask_ )
10887 {
10888 colorWriteMask = colorWriteMask_;
10889 return *this;
10890 }
10891
10892 operator const VkPipelineColorBlendAttachmentState&() const
10893 {
10894 return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>(this);
10895 }
10896
10897 bool operator==( PipelineColorBlendAttachmentState const& rhs ) const
10898 {
10899 return ( blendEnable == rhs.blendEnable )
10900 && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
10901 && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
10902 && ( colorBlendOp == rhs.colorBlendOp )
10903 && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
10904 && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
10905 && ( alphaBlendOp == rhs.alphaBlendOp )
10906 && ( colorWriteMask == rhs.colorWriteMask );
10907 }
10908
10909 bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const
10910 {
10911 return !operator==( rhs );
10912 }
10913
10914 Bool32 blendEnable;
10915 BlendFactor srcColorBlendFactor;
10916 BlendFactor dstColorBlendFactor;
10917 BlendOp colorBlendOp;
10918 BlendFactor srcAlphaBlendFactor;
10919 BlendFactor dstAlphaBlendFactor;
10920 BlendOp alphaBlendOp;
10921 ColorComponentFlags colorWriteMask;
10922 };
10923 static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
10924
10925 struct PipelineColorBlendStateCreateInfo
10926 {
10927 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_ = PipelineColorBlendStateCreateFlags(), Bool32 logicOpEnable_ = 0, LogicOp logicOp_ = LogicOp::eClear, uint32_t attachmentCount_ = 0, const PipelineColorBlendAttachmentState* pAttachments_ = nullptr, std::array<float,4> const& blendConstants_ = { { 0, 0, 0, 0 } } )
10928 : sType( StructureType::ePipelineColorBlendStateCreateInfo )
10929 , pNext( nullptr )
10930 , flags( flags_ )
10931 , logicOpEnable( logicOpEnable_ )
10932 , logicOp( logicOp_ )
10933 , attachmentCount( attachmentCount_ )
10934 , pAttachments( pAttachments_ )
10935 {
10936 memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
10937 }
10938
10939 PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs )
10940 {
10941 memcpy( this, &rhs, sizeof(PipelineColorBlendStateCreateInfo) );
10942 }
10943
10944 PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs )
10945 {
10946 memcpy( this, &rhs, sizeof(PipelineColorBlendStateCreateInfo) );
10947 return *this;
10948 }
10949
10950 PipelineColorBlendStateCreateInfo& setSType( StructureType sType_ )
10951 {
10952 sType = sType_;
10953 return *this;
10954 }
10955
10956 PipelineColorBlendStateCreateInfo& setPNext( const void* pNext_ )
10957 {
10958 pNext = pNext_;
10959 return *this;
10960 }
10961
10962 PipelineColorBlendStateCreateInfo& setFlags( PipelineColorBlendStateCreateFlags flags_ )
10963 {
10964 flags = flags_;
10965 return *this;
10966 }
10967
10968 PipelineColorBlendStateCreateInfo& setLogicOpEnable( Bool32 logicOpEnable_ )
10969 {
10970 logicOpEnable = logicOpEnable_;
10971 return *this;
10972 }
10973
10974 PipelineColorBlendStateCreateInfo& setLogicOp( LogicOp logicOp_ )
10975 {
10976 logicOp = logicOp_;
10977 return *this;
10978 }
10979
10980 PipelineColorBlendStateCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
10981 {
10982 attachmentCount = attachmentCount_;
10983 return *this;
10984 }
10985
10986 PipelineColorBlendStateCreateInfo& setPAttachments( const PipelineColorBlendAttachmentState* pAttachments_ )
10987 {
10988 pAttachments = pAttachments_;
10989 return *this;
10990 }
10991
10992 PipelineColorBlendStateCreateInfo& setBlendConstants( std::array<float,4> blendConstants_ )
10993 {
10994 memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
10995 return *this;
10996 }
10997
10998 operator const VkPipelineColorBlendStateCreateInfo&() const
10999 {
11000 return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>(this);
11001 }
11002
11003 bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const
11004 {
11005 return ( sType == rhs.sType )
11006 && ( pNext == rhs.pNext )
11007 && ( flags == rhs.flags )
11008 && ( logicOpEnable == rhs.logicOpEnable )
11009 && ( logicOp == rhs.logicOp )
11010 && ( attachmentCount == rhs.attachmentCount )
11011 && ( pAttachments == rhs.pAttachments )
11012 && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
11013 }
11014
11015 bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const
11016 {
11017 return !operator==( rhs );
11018 }
11019
11020 private:
11021 StructureType sType;
11022
11023 public:
11024 const void* pNext;
11025 PipelineColorBlendStateCreateFlags flags;
11026 Bool32 logicOpEnable;
11027 LogicOp logicOp;
11028 uint32_t attachmentCount;
11029 const PipelineColorBlendAttachmentState* pAttachments;
11030 float blendConstants[4];
11031 };
11032 static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
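  // Usage sketch (illustrative comment, not part of the generated header): standard alpha
  // blending on a single color attachment with all color channels written; the write mask is
  // assembled with the ColorComponentFlags operator| defined above.
  //
  //   vk::PipelineColorBlendAttachmentState blendAttachment = vk::PipelineColorBlendAttachmentState()
  //     .setBlendEnable( VK_TRUE )
  //     .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
  //     .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
  //     .setColorBlendOp( vk::BlendOp::eAdd )
  //     .setSrcAlphaBlendFactor( vk::BlendFactor::eOne )
  //     .setDstAlphaBlendFactor( vk::BlendFactor::eZero )
  //     .setAlphaBlendOp( vk::BlendOp::eAdd )
  //     .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
  //                         vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );
  //   vk::PipelineColorBlendStateCreateInfo blendState = vk::PipelineColorBlendStateCreateInfo()
  //     .setAttachmentCount( 1 )
  //     .setPAttachments( &blendAttachment );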
11033
11034 enum class FenceCreateFlagBits
11035 {
11036 eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
11037 };
11038
11039 using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
11040
11041   VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
11042   {
11043 return FenceCreateFlags( bit0 ) | bit1;
11044 }
11045
11046   VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
11047 {
11048 return ~( FenceCreateFlags( bits ) );
11049 }
11050
11051 template <> struct FlagTraits<FenceCreateFlagBits>
11052 {
11053 enum
11054 {
11055 allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
11056 };
11057 };
11058
11059   struct FenceCreateInfo
11060 {
11061 FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
11062 : sType( StructureType::eFenceCreateInfo )
11063 , pNext( nullptr )
11064 , flags( flags_ )
11065 {
11066 }
11067
11068 FenceCreateInfo( VkFenceCreateInfo const & rhs )
11069 {
11070 memcpy( this, &rhs, sizeof(FenceCreateInfo) );
11071 }
11072
11073 FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs )
11074 {
11075 memcpy( this, &rhs, sizeof(FenceCreateInfo) );
11076 return *this;
11077 }
11078
11079 FenceCreateInfo& setSType( StructureType sType_ )
11080 {
11081 sType = sType_;
11082 return *this;
11083 }
11084
11085 FenceCreateInfo& setPNext( const void* pNext_ )
11086 {
11087 pNext = pNext_;
11088 return *this;
11089 }
11090
11091 FenceCreateInfo& setFlags( FenceCreateFlags flags_ )
11092 {
11093 flags = flags_;
11094 return *this;
11095 }
11096
11097 operator const VkFenceCreateInfo&() const
11098 {
11099 return *reinterpret_cast<const VkFenceCreateInfo*>(this);
11100 }
11101
11102 bool operator==( FenceCreateInfo const& rhs ) const
11103 {
11104 return ( sType == rhs.sType )
11105 && ( pNext == rhs.pNext )
11106 && ( flags == rhs.flags );
11107 }
11108
11109 bool operator!=( FenceCreateInfo const& rhs ) const
11110 {
11111 return !operator==( rhs );
11112 }
11113
11114 private:
11115 StructureType sType;
11116
11117 public:
11118 const void* pNext;
11119 FenceCreateFlags flags;
11120 };
11121 static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
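  // Illustrative sketch only: a fence that starts out signaled is described by combining the
  // wrapper struct with the FenceCreateFlagBits defined above (vk namespace assumed).
  //
  //   vk::FenceCreateInfo signaledFenceInfo( vk::FenceCreateFlagBits::eSignaled );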
11122
11123 enum class FormatFeatureFlagBits
11124 {
11125 eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
11126 eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
11127 eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
11128 eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
11129 eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
11130 eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
11131 eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
11132 eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
11133 eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
11134 eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
11135 eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
11136 eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
11137 eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
11138 eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG
11139 };
11140
11141 using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
11142
11143   VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
11144   {
11145 return FormatFeatureFlags( bit0 ) | bit1;
11146 }
11147
11148   VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits )
11149 {
11150 return ~( FormatFeatureFlags( bits ) );
11151 }
11152
11153 template <> struct FlagTraits<FormatFeatureFlagBits>
11154 {
11155 enum
11156 {
11157 allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG)
11158 };
11159 };
11160
11161   struct FormatProperties
11162 {
11163 operator const VkFormatProperties&() const
11164 {
11165 return *reinterpret_cast<const VkFormatProperties*>(this);
11166 }
11167
11168 bool operator==( FormatProperties const& rhs ) const
11169 {
11170 return ( linearTilingFeatures == rhs.linearTilingFeatures )
11171 && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
11172 && ( bufferFeatures == rhs.bufferFeatures );
11173 }
11174
11175 bool operator!=( FormatProperties const& rhs ) const
11176 {
11177 return !operator==( rhs );
11178 }
11179
11180 FormatFeatureFlags linearTilingFeatures;
11181 FormatFeatureFlags optimalTilingFeatures;
11182 FormatFeatureFlags bufferFeatures;
11183 };
11184 static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
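  // Illustrative sketch only: FormatFeatureFlags support bitwise tests through the Flags template,
  // so a FormatProperties value (assumed to be filled in by a physical-device query defined
  // elsewhere in this header) can be checked like this (vk namespace assumed):
  //
  //   vk::FormatProperties props;   // assumed filled in elsewhere
  //   bool blittable = bool( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eBlitSrc )
  //                 && bool( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eBlitDst );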
11185
11186 enum class QueryControlFlagBits
11187 {
11188 ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
11189 };
11190
11191 using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
11192
11193   VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
11194   {
11195 return QueryControlFlags( bit0 ) | bit1;
11196 }
11197
11198   VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits )
11199 {
11200 return ~( QueryControlFlags( bits ) );
11201 }
11202
11203 template <> struct FlagTraits<QueryControlFlagBits>
11204 {
11205 enum
11206 {
11207 allFlags = VkFlags(QueryControlFlagBits::ePrecise)
11208 };
11209 };
11210
11211   enum class QueryResultFlagBits
11212 {
11213 e64 = VK_QUERY_RESULT_64_BIT,
11214 eWait = VK_QUERY_RESULT_WAIT_BIT,
11215 eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
11216 ePartial = VK_QUERY_RESULT_PARTIAL_BIT
11217 };
11218
11219 using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
11220
11221   VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
11222   {
11223 return QueryResultFlags( bit0 ) | bit1;
11224 }
11225
11226   VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits )
11227 {
11228 return ~( QueryResultFlags( bits ) );
11229 }
11230
11231 template <> struct FlagTraits<QueryResultFlagBits>
11232 {
11233 enum
11234 {
11235 allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
11236 };
11237 };
11238
11239   enum class CommandBufferUsageFlagBits
11240 {
11241 eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
11242 eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
11243 eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
11244 };
11245
11246 using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
11247
11248   VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
11249   {
11250 return CommandBufferUsageFlags( bit0 ) | bit1;
11251 }
11252
11253   VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
11254 {
11255 return ~( CommandBufferUsageFlags( bits ) );
11256 }
11257
11258 template <> struct FlagTraits<CommandBufferUsageFlagBits>
11259 {
11260 enum
11261 {
11262 allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
11263 };
11264 };
11265
11266   enum class QueryPipelineStatisticFlagBits
11267 {
11268 eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
11269 eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
11270 eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
11271 eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
11272 eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
11273 eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
11274 eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
11275 eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
11276 eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
11277 eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
11278 eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
11279 };
11280
11281 using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
11282
11283   VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
11284   {
11285 return QueryPipelineStatisticFlags( bit0 ) | bit1;
11286 }
11287
11288   VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
11289 {
11290 return ~( QueryPipelineStatisticFlags( bits ) );
11291 }
11292
11293 template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
11294 {
11295 enum
11296 {
11297 allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
11298 };
11299 };
11300
11301   struct CommandBufferInheritanceInfo
11302 {
11303 CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Framebuffer framebuffer_ = Framebuffer(), Bool32 occlusionQueryEnable_ = 0, QueryControlFlags queryFlags_ = QueryControlFlags(), QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
11304 : sType( StructureType::eCommandBufferInheritanceInfo )
11305 , pNext( nullptr )
11306 , renderPass( renderPass_ )
11307 , subpass( subpass_ )
11308 , framebuffer( framebuffer_ )
11309 , occlusionQueryEnable( occlusionQueryEnable_ )
11310 , queryFlags( queryFlags_ )
11311 , pipelineStatistics( pipelineStatistics_ )
11312 {
11313 }
11314
11315 CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs )
11316 {
11317 memcpy( this, &rhs, sizeof(CommandBufferInheritanceInfo) );
11318 }
11319
11320 CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs )
11321 {
11322 memcpy( this, &rhs, sizeof(CommandBufferInheritanceInfo) );
11323 return *this;
11324 }
11325
11326 CommandBufferInheritanceInfo& setSType( StructureType sType_ )
11327 {
11328 sType = sType_;
11329 return *this;
11330 }
11331
11332 CommandBufferInheritanceInfo& setPNext( const void* pNext_ )
11333 {
11334 pNext = pNext_;
11335 return *this;
11336 }
11337
11338 CommandBufferInheritanceInfo& setRenderPass( RenderPass renderPass_ )
11339 {
11340 renderPass = renderPass_;
11341 return *this;
11342 }
11343
11344 CommandBufferInheritanceInfo& setSubpass( uint32_t subpass_ )
11345 {
11346 subpass = subpass_;
11347 return *this;
11348 }
11349
11350 CommandBufferInheritanceInfo& setFramebuffer( Framebuffer framebuffer_ )
11351 {
11352 framebuffer = framebuffer_;
11353 return *this;
11354 }
11355
11356 CommandBufferInheritanceInfo& setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ )
11357 {
11358 occlusionQueryEnable = occlusionQueryEnable_;
11359 return *this;
11360 }
11361
11362 CommandBufferInheritanceInfo& setQueryFlags( QueryControlFlags queryFlags_ )
11363 {
11364 queryFlags = queryFlags_;
11365 return *this;
11366 }
11367
11368 CommandBufferInheritanceInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
11369 {
11370 pipelineStatistics = pipelineStatistics_;
11371 return *this;
11372 }
11373
11374 operator const VkCommandBufferInheritanceInfo&() const
11375 {
11376 return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>(this);
11377 }
11378
11379 bool operator==( CommandBufferInheritanceInfo const& rhs ) const
11380 {
11381 return ( sType == rhs.sType )
11382 && ( pNext == rhs.pNext )
11383 && ( renderPass == rhs.renderPass )
11384 && ( subpass == rhs.subpass )
11385 && ( framebuffer == rhs.framebuffer )
11386 && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
11387 && ( queryFlags == rhs.queryFlags )
11388 && ( pipelineStatistics == rhs.pipelineStatistics );
11389 }
11390
11391 bool operator!=( CommandBufferInheritanceInfo const& rhs ) const
11392 {
11393 return !operator==( rhs );
11394 }
11395
11396 private:
11397 StructureType sType;
11398
11399 public:
11400 const void* pNext;
11401 RenderPass renderPass;
11402 uint32_t subpass;
11403 Framebuffer framebuffer;
11404 Bool32 occlusionQueryEnable;
11405 QueryControlFlags queryFlags;
11406 QueryPipelineStatisticFlags pipelineStatistics;
11407 };
11408 static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
11409
11410 struct CommandBufferBeginInfo
11411 {
11412 CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = CommandBufferUsageFlags(), const CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr )
11413 : sType( StructureType::eCommandBufferBeginInfo )
11414 , pNext( nullptr )
11415 , flags( flags_ )
11416 , pInheritanceInfo( pInheritanceInfo_ )
11417 {
11418 }
11419
11420 CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs )
11421 {
11422 memcpy( this, &rhs, sizeof(CommandBufferBeginInfo) );
11423 }
11424
11425 CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs )
11426 {
11427 memcpy( this, &rhs, sizeof(CommandBufferBeginInfo) );
11428 return *this;
11429 }
11430
11431 CommandBufferBeginInfo& setSType( StructureType sType_ )
11432 {
11433 sType = sType_;
11434 return *this;
11435 }
11436
11437 CommandBufferBeginInfo& setPNext( const void* pNext_ )
11438 {
11439 pNext = pNext_;
11440 return *this;
11441 }
11442
11443 CommandBufferBeginInfo& setFlags( CommandBufferUsageFlags flags_ )
11444 {
11445 flags = flags_;
11446 return *this;
11447 }
11448
11449 CommandBufferBeginInfo& setPInheritanceInfo( const CommandBufferInheritanceInfo* pInheritanceInfo_ )
11450 {
11451 pInheritanceInfo = pInheritanceInfo_;
11452 return *this;
11453 }
11454
11455 operator const VkCommandBufferBeginInfo&() const
11456 {
11457 return *reinterpret_cast<const VkCommandBufferBeginInfo*>(this);
11458 }
11459
11460 bool operator==( CommandBufferBeginInfo const& rhs ) const
11461 {
11462 return ( sType == rhs.sType )
11463 && ( pNext == rhs.pNext )
11464 && ( flags == rhs.flags )
11465 && ( pInheritanceInfo == rhs.pInheritanceInfo );
11466 }
11467
11468 bool operator!=( CommandBufferBeginInfo const& rhs ) const
11469 {
11470 return !operator==( rhs );
11471 }
11472
11473 private:
11474 StructureType sType;
11475
11476 public:
11477 const void* pNext;
11478 CommandBufferUsageFlags flags;
11479 const CommandBufferInheritanceInfo* pInheritanceInfo;
11480 };
11481 static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
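  // Illustrative sketch only: usage flags combine with the operator| defined above; for a
  // secondary command buffer that continues a render pass (vk namespace and the inheritance
  // values assumed):
  //
  //   vk::CommandBufferInheritanceInfo inheritance;   // renderPass / subpass assumed set elsewhere
  //   vk::CommandBufferBeginInfo beginInfo = vk::CommandBufferBeginInfo()
  //     .setFlags( vk::CommandBufferUsageFlagBits::eOneTimeSubmit | vk::CommandBufferUsageFlagBits::eRenderPassContinue )
  //     .setPInheritanceInfo( &inheritance );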
11482
11483 struct QueryPoolCreateInfo
11484 {
11485 QueryPoolCreateInfo( QueryPoolCreateFlags flags_ = QueryPoolCreateFlags(), QueryType queryType_ = QueryType::eOcclusion, uint32_t queryCount_ = 0, QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
11486 : sType( StructureType::eQueryPoolCreateInfo )
11487 , pNext( nullptr )
11488 , flags( flags_ )
11489 , queryType( queryType_ )
11490 , queryCount( queryCount_ )
11491 , pipelineStatistics( pipelineStatistics_ )
11492 {
11493 }
11494
11495 QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs )
11496 {
11497 memcpy( this, &rhs, sizeof(QueryPoolCreateInfo) );
11498 }
11499
11500 QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs )
11501 {
11502 memcpy( this, &rhs, sizeof(QueryPoolCreateInfo) );
11503 return *this;
11504 }
11505
11506 QueryPoolCreateInfo& setSType( StructureType sType_ )
11507 {
11508 sType = sType_;
11509 return *this;
11510 }
11511
11512 QueryPoolCreateInfo& setPNext( const void* pNext_ )
11513 {
11514 pNext = pNext_;
11515 return *this;
11516 }
11517
11518 QueryPoolCreateInfo& setFlags( QueryPoolCreateFlags flags_ )
11519 {
11520 flags = flags_;
11521 return *this;
11522 }
11523
11524 QueryPoolCreateInfo& setQueryType( QueryType queryType_ )
11525 {
11526 queryType = queryType_;
11527 return *this;
11528 }
11529
11530 QueryPoolCreateInfo& setQueryCount( uint32_t queryCount_ )
11531 {
11532 queryCount = queryCount_;
11533 return *this;
11534 }
11535
11536 QueryPoolCreateInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
11537 {
11538 pipelineStatistics = pipelineStatistics_;
11539 return *this;
11540 }
11541
11542 operator const VkQueryPoolCreateInfo&() const
11543 {
11544 return *reinterpret_cast<const VkQueryPoolCreateInfo*>(this);
11545 }
11546
11547 bool operator==( QueryPoolCreateInfo const& rhs ) const
11548 {
11549 return ( sType == rhs.sType )
11550 && ( pNext == rhs.pNext )
11551 && ( flags == rhs.flags )
11552 && ( queryType == rhs.queryType )
11553 && ( queryCount == rhs.queryCount )
11554 && ( pipelineStatistics == rhs.pipelineStatistics );
11555 }
11556
11557 bool operator!=( QueryPoolCreateInfo const& rhs ) const
11558 {
11559 return !operator==( rhs );
11560 }
11561
11562 private:
11563 StructureType sType;
11564
11565 public:
11566 const void* pNext;
11567 QueryPoolCreateFlags flags;
11568 QueryType queryType;
11569 uint32_t queryCount;
11570 QueryPipelineStatisticFlags pipelineStatistics;
11571 };
11572 static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
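  // Illustrative sketch only: a pipeline-statistics query pool combines the statistic bits above
  // with the fluent setters (vk namespace assumed; QueryType is defined elsewhere in this header):
  //
  //   vk::QueryPoolCreateInfo statisticsPoolInfo = vk::QueryPoolCreateInfo()
  //     .setQueryType( vk::QueryType::ePipelineStatistics )
  //     .setQueryCount( 1 )
  //     .setPipelineStatistics( vk::QueryPipelineStatisticFlagBits::eInputAssemblyVertices
  //                           | vk::QueryPipelineStatisticFlagBits::eClippingPrimitives );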
11573
11574 enum class ImageAspectFlagBits
11575 {
11576 eColor = VK_IMAGE_ASPECT_COLOR_BIT,
11577 eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
11578 eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
11579 eMetadata = VK_IMAGE_ASPECT_METADATA_BIT
11580 };
11581
11582 using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
11583
11584   VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
11585   {
11586 return ImageAspectFlags( bit0 ) | bit1;
11587 }
11588
11589   VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits )
11590 {
11591 return ~( ImageAspectFlags( bits ) );
11592 }
11593
11594 template <> struct FlagTraits<ImageAspectFlagBits>
11595 {
11596 enum
11597 {
11598 allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata)
11599 };
11600 };
11601
11602   struct ImageSubresource
11603 {
11604 ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t arrayLayer_ = 0 )
11605 : aspectMask( aspectMask_ )
11606 , mipLevel( mipLevel_ )
11607 , arrayLayer( arrayLayer_ )
11608 {
11609 }
11610
11611 ImageSubresource( VkImageSubresource const & rhs )
11612 {
11613 memcpy( this, &rhs, sizeof(ImageSubresource) );
11614 }
11615
11616 ImageSubresource& operator=( VkImageSubresource const & rhs )
11617 {
11618 memcpy( this, &rhs, sizeof(ImageSubresource) );
11619 return *this;
11620 }
11621
11622 ImageSubresource& setAspectMask( ImageAspectFlags aspectMask_ )
11623 {
11624 aspectMask = aspectMask_;
11625 return *this;
11626 }
11627
11628 ImageSubresource& setMipLevel( uint32_t mipLevel_ )
11629 {
11630 mipLevel = mipLevel_;
11631 return *this;
11632 }
11633
11634 ImageSubresource& setArrayLayer( uint32_t arrayLayer_ )
11635 {
11636 arrayLayer = arrayLayer_;
11637 return *this;
11638 }
11639
11640 operator const VkImageSubresource&() const
11641 {
11642 return *reinterpret_cast<const VkImageSubresource*>(this);
11643 }
11644
11645 bool operator==( ImageSubresource const& rhs ) const
11646 {
11647 return ( aspectMask == rhs.aspectMask )
11648 && ( mipLevel == rhs.mipLevel )
11649 && ( arrayLayer == rhs.arrayLayer );
11650 }
11651
11652 bool operator!=( ImageSubresource const& rhs ) const
11653 {
11654 return !operator==( rhs );
11655 }
11656
11657 ImageAspectFlags aspectMask;
11658 uint32_t mipLevel;
11659 uint32_t arrayLayer;
11660 };
11661 static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
11662
11663 struct ImageSubresourceLayers
11664 {
11665 ImageSubresourceLayers( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
11666 : aspectMask( aspectMask_ )
11667 , mipLevel( mipLevel_ )
11668 , baseArrayLayer( baseArrayLayer_ )
11669 , layerCount( layerCount_ )
11670 {
11671 }
11672
11673 ImageSubresourceLayers( VkImageSubresourceLayers const & rhs )
11674 {
11675 memcpy( this, &rhs, sizeof(ImageSubresourceLayers) );
11676 }
11677
11678 ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs )
11679 {
11680 memcpy( this, &rhs, sizeof(ImageSubresourceLayers) );
11681 return *this;
11682 }
11683
11684 ImageSubresourceLayers& setAspectMask( ImageAspectFlags aspectMask_ )
11685 {
11686 aspectMask = aspectMask_;
11687 return *this;
11688 }
11689
11690 ImageSubresourceLayers& setMipLevel( uint32_t mipLevel_ )
11691 {
11692 mipLevel = mipLevel_;
11693 return *this;
11694 }
11695
11696 ImageSubresourceLayers& setBaseArrayLayer( uint32_t baseArrayLayer_ )
11697 {
11698 baseArrayLayer = baseArrayLayer_;
11699 return *this;
11700 }
11701
11702 ImageSubresourceLayers& setLayerCount( uint32_t layerCount_ )
11703 {
11704 layerCount = layerCount_;
11705 return *this;
11706 }
11707
11708 operator const VkImageSubresourceLayers&() const
11709 {
11710 return *reinterpret_cast<const VkImageSubresourceLayers*>(this);
11711 }
11712
11713 bool operator==( ImageSubresourceLayers const& rhs ) const
11714 {
11715 return ( aspectMask == rhs.aspectMask )
11716 && ( mipLevel == rhs.mipLevel )
11717 && ( baseArrayLayer == rhs.baseArrayLayer )
11718 && ( layerCount == rhs.layerCount );
11719 }
11720
11721 bool operator!=( ImageSubresourceLayers const& rhs ) const
11722 {
11723 return !operator==( rhs );
11724 }
11725
11726 ImageAspectFlags aspectMask;
11727 uint32_t mipLevel;
11728 uint32_t baseArrayLayer;
11729 uint32_t layerCount;
11730 };
11731 static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
11732
11733 struct ImageSubresourceRange
11734 {
11735 ImageSubresourceRange( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t baseMipLevel_ = 0, uint32_t levelCount_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
11736 : aspectMask( aspectMask_ )
11737 , baseMipLevel( baseMipLevel_ )
11738 , levelCount( levelCount_ )
11739 , baseArrayLayer( baseArrayLayer_ )
11740 , layerCount( layerCount_ )
11741 {
11742 }
11743
11744 ImageSubresourceRange( VkImageSubresourceRange const & rhs )
11745 {
11746 memcpy( this, &rhs, sizeof(ImageSubresourceRange) );
11747 }
11748
11749 ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs )
11750 {
11751 memcpy( this, &rhs, sizeof(ImageSubresourceRange) );
11752 return *this;
11753 }
11754
11755 ImageSubresourceRange& setAspectMask( ImageAspectFlags aspectMask_ )
11756 {
11757 aspectMask = aspectMask_;
11758 return *this;
11759 }
11760
11761 ImageSubresourceRange& setBaseMipLevel( uint32_t baseMipLevel_ )
11762 {
11763 baseMipLevel = baseMipLevel_;
11764 return *this;
11765 }
11766
11767 ImageSubresourceRange& setLevelCount( uint32_t levelCount_ )
11768 {
11769 levelCount = levelCount_;
11770 return *this;
11771 }
11772
11773 ImageSubresourceRange& setBaseArrayLayer( uint32_t baseArrayLayer_ )
11774 {
11775 baseArrayLayer = baseArrayLayer_;
11776 return *this;
11777 }
11778
11779 ImageSubresourceRange& setLayerCount( uint32_t layerCount_ )
11780 {
11781 layerCount = layerCount_;
11782 return *this;
11783 }
11784
11785 operator const VkImageSubresourceRange&() const
11786 {
11787 return *reinterpret_cast<const VkImageSubresourceRange*>(this);
11788 }
11789
11790 bool operator==( ImageSubresourceRange const& rhs ) const
11791 {
11792 return ( aspectMask == rhs.aspectMask )
11793 && ( baseMipLevel == rhs.baseMipLevel )
11794 && ( levelCount == rhs.levelCount )
11795 && ( baseArrayLayer == rhs.baseArrayLayer )
11796 && ( layerCount == rhs.layerCount );
11797 }
11798
11799 bool operator!=( ImageSubresourceRange const& rhs ) const
11800 {
11801 return !operator==( rhs );
11802 }
11803
11804 ImageAspectFlags aspectMask;
11805 uint32_t baseMipLevel;
11806 uint32_t levelCount;
11807 uint32_t baseArrayLayer;
11808 uint32_t layerCount;
11809 };
11810 static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
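  // Illustrative sketch only: a full-image, single-mip, single-layer color range, as commonly
  // passed to the barrier and image-view structs below (vk namespace assumed):
  //
  //   vk::ImageSubresourceRange fullColorRange = vk::ImageSubresourceRange()
  //     .setAspectMask( vk::ImageAspectFlagBits::eColor )
  //     .setBaseMipLevel( 0 )
  //     .setLevelCount( 1 )
  //     .setBaseArrayLayer( 0 )
  //     .setLayerCount( 1 );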
11811
11812 struct ImageMemoryBarrier
11813 {
11814 ImageMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Image image_ = Image(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
11815 : sType( StructureType::eImageMemoryBarrier )
11816 , pNext( nullptr )
11817 , srcAccessMask( srcAccessMask_ )
11818 , dstAccessMask( dstAccessMask_ )
11819 , oldLayout( oldLayout_ )
11820 , newLayout( newLayout_ )
11821 , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
11822 , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
11823 , image( image_ )
11824 , subresourceRange( subresourceRange_ )
11825 {
11826 }
11827
11828 ImageMemoryBarrier( VkImageMemoryBarrier const & rhs )
11829 {
11830 memcpy( this, &rhs, sizeof(ImageMemoryBarrier) );
11831 }
11832
11833 ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs )
11834 {
11835 memcpy( this, &rhs, sizeof(ImageMemoryBarrier) );
11836 return *this;
11837 }
11838
11839 ImageMemoryBarrier& setSType( StructureType sType_ )
11840 {
11841 sType = sType_;
11842 return *this;
11843 }
11844
11845 ImageMemoryBarrier& setPNext( const void* pNext_ )
11846 {
11847 pNext = pNext_;
11848 return *this;
11849 }
11850
11851 ImageMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
11852 {
11853 srcAccessMask = srcAccessMask_;
11854 return *this;
11855 }
11856
11857 ImageMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
11858 {
11859 dstAccessMask = dstAccessMask_;
11860 return *this;
11861 }
11862
11863 ImageMemoryBarrier& setOldLayout( ImageLayout oldLayout_ )
11864 {
11865 oldLayout = oldLayout_;
11866 return *this;
11867 }
11868
11869 ImageMemoryBarrier& setNewLayout( ImageLayout newLayout_ )
11870 {
11871 newLayout = newLayout_;
11872 return *this;
11873 }
11874
11875 ImageMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
11876 {
11877 srcQueueFamilyIndex = srcQueueFamilyIndex_;
11878 return *this;
11879 }
11880
11881 ImageMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
11882 {
11883 dstQueueFamilyIndex = dstQueueFamilyIndex_;
11884 return *this;
11885 }
11886
11887 ImageMemoryBarrier& setImage( Image image_ )
11888 {
11889 image = image_;
11890 return *this;
11891 }
11892
11893 ImageMemoryBarrier& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
11894 {
11895 subresourceRange = subresourceRange_;
11896 return *this;
11897 }
11898
11899 operator const VkImageMemoryBarrier&() const
11900 {
11901 return *reinterpret_cast<const VkImageMemoryBarrier*>(this);
11902 }
11903
11904 bool operator==( ImageMemoryBarrier const& rhs ) const
11905 {
11906 return ( sType == rhs.sType )
11907 && ( pNext == rhs.pNext )
11908 && ( srcAccessMask == rhs.srcAccessMask )
11909 && ( dstAccessMask == rhs.dstAccessMask )
11910 && ( oldLayout == rhs.oldLayout )
11911 && ( newLayout == rhs.newLayout )
11912 && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
11913 && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
11914 && ( image == rhs.image )
11915 && ( subresourceRange == rhs.subresourceRange );
11916 }
11917
11918 bool operator!=( ImageMemoryBarrier const& rhs ) const
11919 {
11920 return !operator==( rhs );
11921 }
11922
11923 private:
11924 StructureType sType;
11925
11926 public:
11927 const void* pNext;
11928 AccessFlags srcAccessMask;
11929 AccessFlags dstAccessMask;
11930 ImageLayout oldLayout;
11931 ImageLayout newLayout;
11932 uint32_t srcQueueFamilyIndex;
11933 uint32_t dstQueueFamilyIndex;
11934 Image image;
11935 ImageSubresourceRange subresourceRange;
11936 };
11937 static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
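  // Illustrative sketch only: a typical layout transition expressed with the struct above.
  // 'image' and 'fullColorRange' are assumed to exist; AccessFlagBits and ImageLayout are
  // defined elsewhere in this header, and VK_QUEUE_FAMILY_IGNORED comes from vulkan.h.
  //
  //   vk::ImageMemoryBarrier toTransferDst = vk::ImageMemoryBarrier()
  //     .setSrcAccessMask( vk::AccessFlags() )
  //     .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
  //     .setOldLayout( vk::ImageLayout::eUndefined )
  //     .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
  //     .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //     .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //     .setImage( image )
  //     .setSubresourceRange( fullColorRange );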
11938
11939 struct ImageViewCreateInfo
11940 {
11941 ImageViewCreateInfo( ImageViewCreateFlags flags_ = ImageViewCreateFlags(), Image image_ = Image(), ImageViewType viewType_ = ImageViewType::e1D, Format format_ = Format::eUndefined, ComponentMapping components_ = ComponentMapping(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
11942 : sType( StructureType::eImageViewCreateInfo )
11943 , pNext( nullptr )
11944 , flags( flags_ )
11945 , image( image_ )
11946 , viewType( viewType_ )
11947 , format( format_ )
11948 , components( components_ )
11949 , subresourceRange( subresourceRange_ )
11950 {
11951 }
11952
11953 ImageViewCreateInfo( VkImageViewCreateInfo const & rhs )
11954 {
11955 memcpy( this, &rhs, sizeof(ImageViewCreateInfo) );
11956 }
11957
11958 ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs )
11959 {
11960 memcpy( this, &rhs, sizeof(ImageViewCreateInfo) );
11961 return *this;
11962 }
11963
11964 ImageViewCreateInfo& setSType( StructureType sType_ )
11965 {
11966 sType = sType_;
11967 return *this;
11968 }
11969
11970 ImageViewCreateInfo& setPNext( const void* pNext_ )
11971 {
11972 pNext = pNext_;
11973 return *this;
11974 }
11975
11976 ImageViewCreateInfo& setFlags( ImageViewCreateFlags flags_ )
11977 {
11978 flags = flags_;
11979 return *this;
11980 }
11981
11982 ImageViewCreateInfo& setImage( Image image_ )
11983 {
11984 image = image_;
11985 return *this;
11986 }
11987
11988 ImageViewCreateInfo& setViewType( ImageViewType viewType_ )
11989 {
11990 viewType = viewType_;
11991 return *this;
11992 }
11993
11994 ImageViewCreateInfo& setFormat( Format format_ )
11995 {
11996 format = format_;
11997 return *this;
11998 }
11999
12000 ImageViewCreateInfo& setComponents( ComponentMapping components_ )
12001 {
12002 components = components_;
12003 return *this;
12004 }
12005
12006 ImageViewCreateInfo& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
12007 {
12008 subresourceRange = subresourceRange_;
12009 return *this;
12010 }
12011
12012 operator const VkImageViewCreateInfo&() const
12013 {
12014 return *reinterpret_cast<const VkImageViewCreateInfo*>(this);
12015 }
12016
12017 bool operator==( ImageViewCreateInfo const& rhs ) const
12018 {
12019 return ( sType == rhs.sType )
12020 && ( pNext == rhs.pNext )
12021 && ( flags == rhs.flags )
12022 && ( image == rhs.image )
12023 && ( viewType == rhs.viewType )
12024 && ( format == rhs.format )
12025 && ( components == rhs.components )
12026 && ( subresourceRange == rhs.subresourceRange );
12027 }
12028
12029 bool operator!=( ImageViewCreateInfo const& rhs ) const
12030 {
12031 return !operator==( rhs );
12032 }
12033
12034 private:
12035 StructureType sType;
12036
12037 public:
12038 const void* pNext;
12039 ImageViewCreateFlags flags;
12040 Image image;
12041 ImageViewType viewType;
12042 Format format;
12043 ComponentMapping components;
12044 ImageSubresourceRange subresourceRange;
12045 };
12046 static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
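  // Illustrative sketch only: a 2D color view over an existing image; 'image', 'imageFormat'
  // and 'fullColorRange' are assumed to exist (vk namespace assumed):
  //
  //   vk::ImageViewCreateInfo viewInfo = vk::ImageViewCreateInfo()
  //     .setImage( image )
  //     .setViewType( vk::ImageViewType::e2D )
  //     .setFormat( imageFormat )
  //     .setSubresourceRange( fullColorRange );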
12047
12048 struct ImageCopy
12049 {
12050 ImageCopy( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
12051 : srcSubresource( srcSubresource_ )
12052 , srcOffset( srcOffset_ )
12053 , dstSubresource( dstSubresource_ )
12054 , dstOffset( dstOffset_ )
12055 , extent( extent_ )
12056 {
12057 }
12058
12059 ImageCopy( VkImageCopy const & rhs )
12060 {
12061 memcpy( this, &rhs, sizeof(ImageCopy) );
12062 }
12063
12064 ImageCopy& operator=( VkImageCopy const & rhs )
12065 {
12066 memcpy( this, &rhs, sizeof(ImageCopy) );
12067 return *this;
12068 }
12069
12070 ImageCopy& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12071 {
12072 srcSubresource = srcSubresource_;
12073 return *this;
12074 }
12075
12076 ImageCopy& setSrcOffset( Offset3D srcOffset_ )
12077 {
12078 srcOffset = srcOffset_;
12079 return *this;
12080 }
12081
12082 ImageCopy& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12083 {
12084 dstSubresource = dstSubresource_;
12085 return *this;
12086 }
12087
12088 ImageCopy& setDstOffset( Offset3D dstOffset_ )
12089 {
12090 dstOffset = dstOffset_;
12091 return *this;
12092 }
12093
12094 ImageCopy& setExtent( Extent3D extent_ )
12095 {
12096 extent = extent_;
12097 return *this;
12098 }
12099
12100 operator const VkImageCopy&() const
12101 {
12102 return *reinterpret_cast<const VkImageCopy*>(this);
12103 }
12104
12105 bool operator==( ImageCopy const& rhs ) const
12106 {
12107 return ( srcSubresource == rhs.srcSubresource )
12108 && ( srcOffset == rhs.srcOffset )
12109 && ( dstSubresource == rhs.dstSubresource )
12110 && ( dstOffset == rhs.dstOffset )
12111 && ( extent == rhs.extent );
12112 }
12113
12114 bool operator!=( ImageCopy const& rhs ) const
12115 {
12116 return !operator==( rhs );
12117 }
12118
12119 ImageSubresourceLayers srcSubresource;
12120 Offset3D srcOffset;
12121 ImageSubresourceLayers dstSubresource;
12122 Offset3D dstOffset;
12123 Extent3D extent;
12124 };
12125 static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
12126
12127 struct ImageBlit
12128 {
12129 ImageBlit( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& srcOffsets_ = { { Offset3D(), Offset3D() } }, ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& dstOffsets_ = { { Offset3D(), Offset3D() } } )
12130 : srcSubresource( srcSubresource_ )
12131 , dstSubresource( dstSubresource_ )
12132 {
12133 memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
12134 memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
12135 }
12136
12137 ImageBlit( VkImageBlit const & rhs )
12138 {
12139 memcpy( this, &rhs, sizeof(ImageBlit) );
12140 }
12141
12142 ImageBlit& operator=( VkImageBlit const & rhs )
12143 {
12144 memcpy( this, &rhs, sizeof(ImageBlit) );
12145 return *this;
12146 }
12147
12148 ImageBlit& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12149 {
12150 srcSubresource = srcSubresource_;
12151 return *this;
12152 }
12153
12154 ImageBlit& setSrcOffsets( std::array<Offset3D,2> srcOffsets_ )
12155 {
12156 memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
12157 return *this;
12158 }
12159
12160 ImageBlit& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12161 {
12162 dstSubresource = dstSubresource_;
12163 return *this;
12164 }
12165
12166 ImageBlit& setDstOffsets( std::array<Offset3D,2> dstOffsets_ )
12167 {
12168 memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
12169 return *this;
12170 }
12171
12172 operator const VkImageBlit&() const
12173 {
12174 return *reinterpret_cast<const VkImageBlit*>(this);
12175 }
12176
12177 bool operator==( ImageBlit const& rhs ) const
12178 {
12179 return ( srcSubresource == rhs.srcSubresource )
12180 && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( Offset3D ) ) == 0 )
12181 && ( dstSubresource == rhs.dstSubresource )
12182 && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( Offset3D ) ) == 0 );
12183 }
12184
12185 bool operator!=( ImageBlit const& rhs ) const
12186 {
12187 return !operator==( rhs );
12188 }
12189
12190 ImageSubresourceLayers srcSubresource;
12191 Offset3D srcOffsets[2];
12192 ImageSubresourceLayers dstSubresource;
12193 Offset3D dstOffsets[2];
12194 };
12195 static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
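  // Illustrative sketch only: blit regions are bounded by explicit corner offsets; a 2:1
  // downscale from mip 0 (256x256) into mip 1 (128x128) could be described as follows
  // (vk namespace assumed):
  //
  //   vk::ImageBlit blit = vk::ImageBlit()
  //     .setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
  //     .setSrcOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( 256, 256, 1 ) } } )
  //     .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 1, 0, 1 ) )
  //     .setDstOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( 128, 128, 1 ) } } );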
12196
12197 struct BufferImageCopy
12198 {
12199 BufferImageCopy( DeviceSize bufferOffset_ = 0, uint32_t bufferRowLength_ = 0, uint32_t bufferImageHeight_ = 0, ImageSubresourceLayers imageSubresource_ = ImageSubresourceLayers(), Offset3D imageOffset_ = Offset3D(), Extent3D imageExtent_ = Extent3D() )
12200 : bufferOffset( bufferOffset_ )
12201 , bufferRowLength( bufferRowLength_ )
12202 , bufferImageHeight( bufferImageHeight_ )
12203 , imageSubresource( imageSubresource_ )
12204 , imageOffset( imageOffset_ )
12205 , imageExtent( imageExtent_ )
12206 {
12207 }
12208
12209 BufferImageCopy( VkBufferImageCopy const & rhs )
12210 {
12211 memcpy( this, &rhs, sizeof(BufferImageCopy) );
12212 }
12213
12214 BufferImageCopy& operator=( VkBufferImageCopy const & rhs )
12215 {
12216 memcpy( this, &rhs, sizeof(BufferImageCopy) );
12217 return *this;
12218 }
12219
12220 BufferImageCopy& setBufferOffset( DeviceSize bufferOffset_ )
12221 {
12222 bufferOffset = bufferOffset_;
12223 return *this;
12224 }
12225
12226 BufferImageCopy& setBufferRowLength( uint32_t bufferRowLength_ )
12227 {
12228 bufferRowLength = bufferRowLength_;
12229 return *this;
12230 }
12231
12232 BufferImageCopy& setBufferImageHeight( uint32_t bufferImageHeight_ )
12233 {
12234 bufferImageHeight = bufferImageHeight_;
12235 return *this;
12236 }
12237
12238 BufferImageCopy& setImageSubresource( ImageSubresourceLayers imageSubresource_ )
12239 {
12240 imageSubresource = imageSubresource_;
12241 return *this;
12242 }
12243
12244 BufferImageCopy& setImageOffset( Offset3D imageOffset_ )
12245 {
12246 imageOffset = imageOffset_;
12247 return *this;
12248 }
12249
12250 BufferImageCopy& setImageExtent( Extent3D imageExtent_ )
12251 {
12252 imageExtent = imageExtent_;
12253 return *this;
12254 }
12255
12256 operator const VkBufferImageCopy&() const
12257 {
12258 return *reinterpret_cast<const VkBufferImageCopy*>(this);
12259 }
12260
12261 bool operator==( BufferImageCopy const& rhs ) const
12262 {
12263 return ( bufferOffset == rhs.bufferOffset )
12264 && ( bufferRowLength == rhs.bufferRowLength )
12265 && ( bufferImageHeight == rhs.bufferImageHeight )
12266 && ( imageSubresource == rhs.imageSubresource )
12267 && ( imageOffset == rhs.imageOffset )
12268 && ( imageExtent == rhs.imageExtent );
12269 }
12270
12271 bool operator!=( BufferImageCopy const& rhs ) const
12272 {
12273 return !operator==( rhs );
12274 }
12275
12276 DeviceSize bufferOffset;
12277 uint32_t bufferRowLength;
12278 uint32_t bufferImageHeight;
12279 ImageSubresourceLayers imageSubresource;
12280 Offset3D imageOffset;
12281 Extent3D imageExtent;
12282 };
12283 static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
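  // Illustrative sketch only: a region copying a tightly packed staging buffer into mip level 0
  // of a 2D image; 'imageExtent' is an assumed vk::Extent3D (vk namespace assumed):
  //
  //   vk::BufferImageCopy region = vk::BufferImageCopy()
  //     .setBufferOffset( 0 )
  //     .setBufferRowLength( 0 )        // 0 = tightly packed rows
  //     .setBufferImageHeight( 0 )
  //     .setImageSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
  //     .setImageOffset( vk::Offset3D( 0, 0, 0 ) )
  //     .setImageExtent( imageExtent );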
12284
12285 struct ImageResolve
12286 {
12287 ImageResolve( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
12288 : srcSubresource( srcSubresource_ )
12289 , srcOffset( srcOffset_ )
12290 , dstSubresource( dstSubresource_ )
12291 , dstOffset( dstOffset_ )
12292 , extent( extent_ )
12293 {
12294 }
12295
12296 ImageResolve( VkImageResolve const & rhs )
12297 {
12298 memcpy( this, &rhs, sizeof(ImageResolve) );
12299 }
12300
12301 ImageResolve& operator=( VkImageResolve const & rhs )
12302 {
12303 memcpy( this, &rhs, sizeof(ImageResolve) );
12304 return *this;
12305 }
12306
12307 ImageResolve& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12308 {
12309 srcSubresource = srcSubresource_;
12310 return *this;
12311 }
12312
12313 ImageResolve& setSrcOffset( Offset3D srcOffset_ )
12314 {
12315 srcOffset = srcOffset_;
12316 return *this;
12317 }
12318
12319 ImageResolve& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12320 {
12321 dstSubresource = dstSubresource_;
12322 return *this;
12323 }
12324
12325 ImageResolve& setDstOffset( Offset3D dstOffset_ )
12326 {
12327 dstOffset = dstOffset_;
12328 return *this;
12329 }
12330
12331 ImageResolve& setExtent( Extent3D extent_ )
12332 {
12333 extent = extent_;
12334 return *this;
12335 }
12336
12337 operator const VkImageResolve&() const
12338 {
12339 return *reinterpret_cast<const VkImageResolve*>(this);
12340 }
12341
12342 bool operator==( ImageResolve const& rhs ) const
12343 {
12344 return ( srcSubresource == rhs.srcSubresource )
12345 && ( srcOffset == rhs.srcOffset )
12346 && ( dstSubresource == rhs.dstSubresource )
12347 && ( dstOffset == rhs.dstOffset )
12348 && ( extent == rhs.extent );
12349 }
12350
12351 bool operator!=( ImageResolve const& rhs ) const
12352 {
12353 return !operator==( rhs );
12354 }
12355
12356 ImageSubresourceLayers srcSubresource;
12357 Offset3D srcOffset;
12358 ImageSubresourceLayers dstSubresource;
12359 Offset3D dstOffset;
12360 Extent3D extent;
12361 };
12362 static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
12363
12364 struct ClearAttachment
12365 {
12366 ClearAttachment( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t colorAttachment_ = 0, ClearValue clearValue_ = ClearValue() )
12367 : aspectMask( aspectMask_ )
12368 , colorAttachment( colorAttachment_ )
12369 , clearValue( clearValue_ )
12370 {
12371 }
12372
12373 ClearAttachment( VkClearAttachment const & rhs )
12374 {
12375 memcpy( this, &rhs, sizeof(ClearAttachment) );
12376 }
12377
12378 ClearAttachment& operator=( VkClearAttachment const & rhs )
12379 {
12380 memcpy( this, &rhs, sizeof(ClearAttachment) );
12381 return *this;
12382 }
12383
12384 ClearAttachment& setAspectMask( ImageAspectFlags aspectMask_ )
12385 {
12386 aspectMask = aspectMask_;
12387 return *this;
12388 }
12389
12390 ClearAttachment& setColorAttachment( uint32_t colorAttachment_ )
12391 {
12392 colorAttachment = colorAttachment_;
12393 return *this;
12394 }
12395
12396 ClearAttachment& setClearValue( ClearValue clearValue_ )
12397 {
12398 clearValue = clearValue_;
12399 return *this;
12400 }
12401
12402 operator const VkClearAttachment&() const
12403 {
12404 return *reinterpret_cast<const VkClearAttachment*>(this);
12405 }
12406
12407 ImageAspectFlags aspectMask;
12408 uint32_t colorAttachment;
12409 ClearValue clearValue;
12410 };
12411 static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
12412
12413 enum class SparseImageFormatFlagBits
12414 {
12415 eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
12416 eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
12417 eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
12418 };
12419
12420 using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
12421
12422   VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
12423   {
12424 return SparseImageFormatFlags( bit0 ) | bit1;
12425 }
12426
12427   VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
12428 {
12429 return ~( SparseImageFormatFlags( bits ) );
12430 }
12431
12432 template <> struct FlagTraits<SparseImageFormatFlagBits>
12433 {
12434 enum
12435 {
12436 allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
12437 };
12438 };
12439
12440   struct SparseImageFormatProperties
12441 {
12442 operator const VkSparseImageFormatProperties&() const
12443 {
12444 return *reinterpret_cast<const VkSparseImageFormatProperties*>(this);
12445 }
12446
12447 bool operator==( SparseImageFormatProperties const& rhs ) const
12448 {
12449 return ( aspectMask == rhs.aspectMask )
12450 && ( imageGranularity == rhs.imageGranularity )
12451 && ( flags == rhs.flags );
12452 }
12453
12454 bool operator!=( SparseImageFormatProperties const& rhs ) const
12455 {
12456 return !operator==( rhs );
12457 }
12458
12459 ImageAspectFlags aspectMask;
12460 Extent3D imageGranularity;
12461 SparseImageFormatFlags flags;
12462 };
12463 static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
12464
12465 struct SparseImageMemoryRequirements
12466 {
12467 operator const VkSparseImageMemoryRequirements&() const
12468 {
12469 return *reinterpret_cast<const VkSparseImageMemoryRequirements*>(this);
12470 }
12471
12472 bool operator==( SparseImageMemoryRequirements const& rhs ) const
12473 {
12474 return ( formatProperties == rhs.formatProperties )
12475 && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
12476 && ( imageMipTailSize == rhs.imageMipTailSize )
12477 && ( imageMipTailOffset == rhs.imageMipTailOffset )
12478 && ( imageMipTailStride == rhs.imageMipTailStride );
12479 }
12480
12481 bool operator!=( SparseImageMemoryRequirements const& rhs ) const
12482 {
12483 return !operator==( rhs );
12484 }
12485
12486 SparseImageFormatProperties formatProperties;
12487 uint32_t imageMipTailFirstLod;
12488 DeviceSize imageMipTailSize;
12489 DeviceSize imageMipTailOffset;
12490 DeviceSize imageMipTailStride;
12491 };
12492 static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
12493
12494 enum class SparseMemoryBindFlagBits
12495 {
12496 eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
12497 };
12498
12499 using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
12500
12501   VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
12502   {
12503 return SparseMemoryBindFlags( bit0 ) | bit1;
12504 }
12505
12506   VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
12507 {
12508 return ~( SparseMemoryBindFlags( bits ) );
12509 }
12510
12511 template <> struct FlagTraits<SparseMemoryBindFlagBits>
12512 {
12513 enum
12514 {
12515 allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
12516 };
12517 };
12518
12519   struct SparseMemoryBind
12520 {
12521 SparseMemoryBind( DeviceSize resourceOffset_ = 0, DeviceSize size_ = 0, DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
12522 : resourceOffset( resourceOffset_ )
12523 , size( size_ )
12524 , memory( memory_ )
12525 , memoryOffset( memoryOffset_ )
12526 , flags( flags_ )
12527 {
12528 }
12529
12530 SparseMemoryBind( VkSparseMemoryBind const & rhs )
12531 {
12532 memcpy( this, &rhs, sizeof(SparseMemoryBind) );
12533 }
12534
12535 SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs )
12536 {
12537 memcpy( this, &rhs, sizeof(SparseMemoryBind) );
12538 return *this;
12539 }
12540
12541 SparseMemoryBind& setResourceOffset( DeviceSize resourceOffset_ )
12542 {
12543 resourceOffset = resourceOffset_;
12544 return *this;
12545 }
12546
12547 SparseMemoryBind& setSize( DeviceSize size_ )
12548 {
12549 size = size_;
12550 return *this;
12551 }
12552
12553 SparseMemoryBind& setMemory( DeviceMemory memory_ )
12554 {
12555 memory = memory_;
12556 return *this;
12557 }
12558
12559 SparseMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
12560 {
12561 memoryOffset = memoryOffset_;
12562 return *this;
12563 }
12564
12565 SparseMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
12566 {
12567 flags = flags_;
12568 return *this;
12569 }
12570
12571 operator const VkSparseMemoryBind&() const
12572 {
12573 return *reinterpret_cast<const VkSparseMemoryBind*>(this);
12574 }
12575
12576 bool operator==( SparseMemoryBind const& rhs ) const
12577 {
12578 return ( resourceOffset == rhs.resourceOffset )
12579 && ( size == rhs.size )
12580 && ( memory == rhs.memory )
12581 && ( memoryOffset == rhs.memoryOffset )
12582 && ( flags == rhs.flags );
12583 }
12584
12585 bool operator!=( SparseMemoryBind const& rhs ) const
12586 {
12587 return !operator==( rhs );
12588 }
12589
12590 DeviceSize resourceOffset;
12591 DeviceSize size;
12592 DeviceMemory memory;
12593 DeviceSize memoryOffset;
12594 SparseMemoryBindFlags flags;
12595 };
12596 static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
12597
12598 struct SparseImageMemoryBind
12599 {
12600 SparseImageMemoryBind( ImageSubresource subresource_ = ImageSubresource(), Offset3D offset_ = Offset3D(), Extent3D extent_ = Extent3D(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
12601 : subresource( subresource_ )
12602 , offset( offset_ )
12603 , extent( extent_ )
12604 , memory( memory_ )
12605 , memoryOffset( memoryOffset_ )
12606 , flags( flags_ )
12607 {
12608 }
12609
12610 SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs )
12611 {
12612 memcpy( this, &rhs, sizeof(SparseImageMemoryBind) );
12613 }
12614
12615 SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs )
12616 {
12617 memcpy( this, &rhs, sizeof(SparseImageMemoryBind) );
12618 return *this;
12619 }
12620
12621 SparseImageMemoryBind& setSubresource( ImageSubresource subresource_ )
12622 {
12623 subresource = subresource_;
12624 return *this;
12625 }
12626
12627 SparseImageMemoryBind& setOffset( Offset3D offset_ )
12628 {
12629 offset = offset_;
12630 return *this;
12631 }
12632
12633 SparseImageMemoryBind& setExtent( Extent3D extent_ )
12634 {
12635 extent = extent_;
12636 return *this;
12637 }
12638
12639 SparseImageMemoryBind& setMemory( DeviceMemory memory_ )
12640 {
12641 memory = memory_;
12642 return *this;
12643 }
12644
12645 SparseImageMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
12646 {
12647 memoryOffset = memoryOffset_;
12648 return *this;
12649 }
12650
12651 SparseImageMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
12652 {
12653 flags = flags_;
12654 return *this;
12655 }
12656
12657 operator const VkSparseImageMemoryBind&() const
12658 {
12659 return *reinterpret_cast<const VkSparseImageMemoryBind*>(this);
12660 }
12661
12662 bool operator==( SparseImageMemoryBind const& rhs ) const
12663 {
12664 return ( subresource == rhs.subresource )
12665 && ( offset == rhs.offset )
12666 && ( extent == rhs.extent )
12667 && ( memory == rhs.memory )
12668 && ( memoryOffset == rhs.memoryOffset )
12669 && ( flags == rhs.flags );
12670 }
12671
12672 bool operator!=( SparseImageMemoryBind const& rhs ) const
12673 {
12674 return !operator==( rhs );
12675 }
12676
12677 ImageSubresource subresource;
12678 Offset3D offset;
12679 Extent3D extent;
12680 DeviceMemory memory;
12681 DeviceSize memoryOffset;
12682 SparseMemoryBindFlags flags;
12683 };
12684 static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
12685
12686 struct SparseBufferMemoryBindInfo
12687 {
12688 SparseBufferMemoryBindInfo( Buffer buffer_ = Buffer(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
12689 : buffer( buffer_ )
12690 , bindCount( bindCount_ )
12691 , pBinds( pBinds_ )
12692 {
12693 }
12694
12695 SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs )
12696 {
12697 memcpy( this, &rhs, sizeof(SparseBufferMemoryBindInfo) );
12698 }
12699
12700 SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs )
12701 {
12702 memcpy( this, &rhs, sizeof(SparseBufferMemoryBindInfo) );
12703 return *this;
12704 }
12705
12706 SparseBufferMemoryBindInfo& setBuffer( Buffer buffer_ )
12707 {
12708 buffer = buffer_;
12709 return *this;
12710 }
12711
12712 SparseBufferMemoryBindInfo& setBindCount( uint32_t bindCount_ )
12713 {
12714 bindCount = bindCount_;
12715 return *this;
12716 }
12717
12718 SparseBufferMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
12719 {
12720 pBinds = pBinds_;
12721 return *this;
12722 }
12723
12724 operator const VkSparseBufferMemoryBindInfo&() const
12725 {
12726 return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>(this);
12727 }
12728
12729 bool operator==( SparseBufferMemoryBindInfo const& rhs ) const
12730 {
12731 return ( buffer == rhs.buffer )
12732 && ( bindCount == rhs.bindCount )
12733 && ( pBinds == rhs.pBinds );
12734 }
12735
12736 bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const
12737 {
12738 return !operator==( rhs );
12739 }
12740
12741 Buffer buffer;
12742 uint32_t bindCount;
12743 const SparseMemoryBind* pBinds;
12744 };
12745 static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
12746
12747 struct SparseImageOpaqueMemoryBindInfo
12748 {
12749 SparseImageOpaqueMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
12750 : image( image_ )
12751 , bindCount( bindCount_ )
12752 , pBinds( pBinds_ )
12753 {
12754 }
12755
12756 SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs )
12757 {
12758 memcpy( this, &rhs, sizeof(SparseImageOpaqueMemoryBindInfo) );
12759 }
12760
12761 SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs )
12762 {
12763 memcpy( this, &rhs, sizeof(SparseImageOpaqueMemoryBindInfo) );
12764 return *this;
12765 }
12766
12767 SparseImageOpaqueMemoryBindInfo& setImage( Image image_ )
12768 {
12769 image = image_;
12770 return *this;
12771 }
12772
12773 SparseImageOpaqueMemoryBindInfo& setBindCount( uint32_t bindCount_ )
12774 {
12775 bindCount = bindCount_;
12776 return *this;
12777 }
12778
12779 SparseImageOpaqueMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
12780 {
12781 pBinds = pBinds_;
12782 return *this;
12783 }
12784
12785 operator const VkSparseImageOpaqueMemoryBindInfo&() const
12786 {
12787 return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>(this);
12788 }
12789
12790 bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const
12791 {
12792 return ( image == rhs.image )
12793 && ( bindCount == rhs.bindCount )
12794 && ( pBinds == rhs.pBinds );
12795 }
12796
12797 bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const
12798 {
12799 return !operator==( rhs );
12800 }
12801
12802 Image image;
12803 uint32_t bindCount;
12804 const SparseMemoryBind* pBinds;
12805 };
12806 static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
12807
12808 struct SparseImageMemoryBindInfo
12809 {
12810 SparseImageMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseImageMemoryBind* pBinds_ = nullptr )
12811 : image( image_ )
12812 , bindCount( bindCount_ )
12813 , pBinds( pBinds_ )
12814 {
12815 }
12816
12817 SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs )
12818 {
12819 memcpy( this, &rhs, sizeof(SparseImageMemoryBindInfo) );
12820 }
12821
12822 SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs )
12823 {
12824 memcpy( this, &rhs, sizeof(SparseImageMemoryBindInfo) );
12825 return *this;
12826 }
12827
12828 SparseImageMemoryBindInfo& setImage( Image image_ )
12829 {
12830 image = image_;
12831 return *this;
12832 }
12833
12834 SparseImageMemoryBindInfo& setBindCount( uint32_t bindCount_ )
12835 {
12836 bindCount = bindCount_;
12837 return *this;
12838 }
12839
12840 SparseImageMemoryBindInfo& setPBinds( const SparseImageMemoryBind* pBinds_ )
12841 {
12842 pBinds = pBinds_;
12843 return *this;
12844 }
12845
12846 operator const VkSparseImageMemoryBindInfo&() const
12847 {
12848 return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>(this);
12849 }
12850
12851 bool operator==( SparseImageMemoryBindInfo const& rhs ) const
12852 {
12853 return ( image == rhs.image )
12854 && ( bindCount == rhs.bindCount )
12855 && ( pBinds == rhs.pBinds );
12856 }
12857
12858 bool operator!=( SparseImageMemoryBindInfo const& rhs ) const
12859 {
12860 return !operator==( rhs );
12861 }
12862
12863 Image image;
12864 uint32_t bindCount;
12865 const SparseImageMemoryBind* pBinds;
12866 };
12867 static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
12868
12869 struct BindSparseInfo
12870 {
12871 BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t bufferBindCount_ = 0, const SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, uint32_t imageOpaqueBindCount_ = 0, const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, uint32_t imageBindCount_ = 0, const SparseImageMemoryBindInfo* pImageBinds_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
12872 : sType( StructureType::eBindSparseInfo )
12873 , pNext( nullptr )
12874 , waitSemaphoreCount( waitSemaphoreCount_ )
12875 , pWaitSemaphores( pWaitSemaphores_ )
12876 , bufferBindCount( bufferBindCount_ )
12877 , pBufferBinds( pBufferBinds_ )
12878 , imageOpaqueBindCount( imageOpaqueBindCount_ )
12879 , pImageOpaqueBinds( pImageOpaqueBinds_ )
12880 , imageBindCount( imageBindCount_ )
12881 , pImageBinds( pImageBinds_ )
12882 , signalSemaphoreCount( signalSemaphoreCount_ )
12883 , pSignalSemaphores( pSignalSemaphores_ )
12884 {
12885 }
12886
12887 BindSparseInfo( VkBindSparseInfo const & rhs )
12888 {
12889 memcpy( this, &rhs, sizeof(BindSparseInfo) );
12890 }
12891
12892 BindSparseInfo& operator=( VkBindSparseInfo const & rhs )
12893 {
12894 memcpy( this, &rhs, sizeof(BindSparseInfo) );
12895 return *this;
12896 }
12897
12898 BindSparseInfo& setSType( StructureType sType_ )
12899 {
12900 sType = sType_;
12901 return *this;
12902 }
12903
12904 BindSparseInfo& setPNext( const void* pNext_ )
12905 {
12906 pNext = pNext_;
12907 return *this;
12908 }
12909
12910 BindSparseInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
12911 {
12912 waitSemaphoreCount = waitSemaphoreCount_;
12913 return *this;
12914 }
12915
12916 BindSparseInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
12917 {
12918 pWaitSemaphores = pWaitSemaphores_;
12919 return *this;
12920 }
12921
12922 BindSparseInfo& setBufferBindCount( uint32_t bufferBindCount_ )
12923 {
12924 bufferBindCount = bufferBindCount_;
12925 return *this;
12926 }
12927
12928 BindSparseInfo& setPBufferBinds( const SparseBufferMemoryBindInfo* pBufferBinds_ )
12929 {
12930 pBufferBinds = pBufferBinds_;
12931 return *this;
12932 }
12933
12934 BindSparseInfo& setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ )
12935 {
12936 imageOpaqueBindCount = imageOpaqueBindCount_;
12937 return *this;
12938 }
12939
12940 BindSparseInfo& setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ )
12941 {
12942 pImageOpaqueBinds = pImageOpaqueBinds_;
12943 return *this;
12944 }
12945
12946 BindSparseInfo& setImageBindCount( uint32_t imageBindCount_ )
12947 {
12948 imageBindCount = imageBindCount_;
12949 return *this;
12950 }
12951
12952 BindSparseInfo& setPImageBinds( const SparseImageMemoryBindInfo* pImageBinds_ )
12953 {
12954 pImageBinds = pImageBinds_;
12955 return *this;
12956 }
12957
12958 BindSparseInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
12959 {
12960 signalSemaphoreCount = signalSemaphoreCount_;
12961 return *this;
12962 }
12963
12964 BindSparseInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
12965 {
12966 pSignalSemaphores = pSignalSemaphores_;
12967 return *this;
12968 }
12969
12970 operator const VkBindSparseInfo&() const
12971 {
12972 return *reinterpret_cast<const VkBindSparseInfo*>(this);
12973 }
12974
12975 bool operator==( BindSparseInfo const& rhs ) const
12976 {
12977 return ( sType == rhs.sType )
12978 && ( pNext == rhs.pNext )
12979 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
12980 && ( pWaitSemaphores == rhs.pWaitSemaphores )
12981 && ( bufferBindCount == rhs.bufferBindCount )
12982 && ( pBufferBinds == rhs.pBufferBinds )
12983 && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
12984 && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
12985 && ( imageBindCount == rhs.imageBindCount )
12986 && ( pImageBinds == rhs.pImageBinds )
12987 && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
12988 && ( pSignalSemaphores == rhs.pSignalSemaphores );
12989 }
12990
12991 bool operator!=( BindSparseInfo const& rhs ) const
12992 {
12993 return !operator==( rhs );
12994 }
12995
12996 private:
12997 StructureType sType;
12998
12999 public:
13000 const void* pNext;
13001 uint32_t waitSemaphoreCount;
13002 const Semaphore* pWaitSemaphores;
13003 uint32_t bufferBindCount;
13004 const SparseBufferMemoryBindInfo* pBufferBinds;
13005 uint32_t imageOpaqueBindCount;
13006 const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
13007 uint32_t imageBindCount;
13008 const SparseImageMemoryBindInfo* pImageBinds;
13009 uint32_t signalSemaphoreCount;
13010 const Semaphore* pSignalSemaphores;
13011 };
13012 static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
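  // Usage sketch (an assumption, not generated code): the wrapper structs above can be
  // chained together and then passed to the C entry point vkQueueBindSparse. The "queue",
  // "buffer", "memory" and the bind sizes below are hypothetical handles/values assumed to
  // exist; the reinterpret_cast is backed by the size static_asserts in this header.
  //
  //   vk::SparseMemoryBind bind;                           // assumed to follow the same setter pattern
  //   bind.setResourceOffset( 0 )
  //       .setSize( 65536 )
  //       .setMemory( memory )
  //       .setMemoryOffset( 0 );
  //
  //   vk::SparseBufferMemoryBindInfo bufferBind( buffer, 1, &bind );
  //
  //   vk::BindSparseInfo bindInfo;
  //   bindInfo.setBufferBindCount( 1 )
  //           .setPBufferBinds( &bufferBind );
  //
  //   vkQueueBindSparse( queue, 1, reinterpret_cast<const VkBindSparseInfo*>( &bindInfo ), VK_NULL_HANDLE );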
13013
13014 enum class PipelineStageFlagBits
13015 {
13016 eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
13017 eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
13018 eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
13019 eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
13020 eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
13021 eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
13022 eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
13023 eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
13024 eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
13025 eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
13026 eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
13027 eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
13028 eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
13029 eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
13030 eHost = VK_PIPELINE_STAGE_HOST_BIT,
13031 eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
13032     eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
13033 eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX
13034   };
13035
13036 using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
13037
13038   VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
13039   {
13040 return PipelineStageFlags( bit0 ) | bit1;
13041 }
13042
13043   VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits )
13044 {
13045 return ~( PipelineStageFlags( bits ) );
13046 }
13047
13048 template <> struct FlagTraits<PipelineStageFlagBits>
13049 {
13050 enum
13051 {
13052 allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX)
13053 };
13054 };
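  // Usage sketch (assumption): individual PipelineStageFlagBits values combine into a
  // PipelineStageFlags mask through the operator| defined above, e.g. when filling the
  // srcStageMask/dstStageMask members of a SubpassDependency later in this header.
  //
  //   vk::PipelineStageFlags stages = vk::PipelineStageFlagBits::eColorAttachmentOutput
  //                                 | vk::PipelineStageFlagBits::eEarlyFragmentTests;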
13055
13056   enum class CommandPoolCreateFlagBits
13057 {
13058 eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
13059 eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
13060 };
13061
13062 using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
13063
13064   VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
13065   {
13066 return CommandPoolCreateFlags( bit0 ) | bit1;
13067 }
13068
13069   VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
13070 {
13071 return ~( CommandPoolCreateFlags( bits ) );
13072 }
13073
13074 template <> struct FlagTraits<CommandPoolCreateFlagBits>
13075 {
13076 enum
13077 {
13078 allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer)
13079 };
13080 };
13081
13082   struct CommandPoolCreateInfo
13083 {
13084 CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), uint32_t queueFamilyIndex_ = 0 )
13085 : sType( StructureType::eCommandPoolCreateInfo )
13086 , pNext( nullptr )
13087 , flags( flags_ )
13088 , queueFamilyIndex( queueFamilyIndex_ )
13089 {
13090 }
13091
13092 CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs )
13093 {
13094 memcpy( this, &rhs, sizeof(CommandPoolCreateInfo) );
13095 }
13096
13097 CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs )
13098 {
13099 memcpy( this, &rhs, sizeof(CommandPoolCreateInfo) );
13100 return *this;
13101 }
13102
13103 CommandPoolCreateInfo& setSType( StructureType sType_ )
13104 {
13105 sType = sType_;
13106 return *this;
13107 }
13108
13109 CommandPoolCreateInfo& setPNext( const void* pNext_ )
13110 {
13111 pNext = pNext_;
13112 return *this;
13113 }
13114
13115 CommandPoolCreateInfo& setFlags( CommandPoolCreateFlags flags_ )
13116 {
13117 flags = flags_;
13118 return *this;
13119 }
13120
13121 CommandPoolCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
13122 {
13123 queueFamilyIndex = queueFamilyIndex_;
13124 return *this;
13125 }
13126
13127 operator const VkCommandPoolCreateInfo&() const
13128 {
13129 return *reinterpret_cast<const VkCommandPoolCreateInfo*>(this);
13130 }
13131
13132 bool operator==( CommandPoolCreateInfo const& rhs ) const
13133 {
13134 return ( sType == rhs.sType )
13135 && ( pNext == rhs.pNext )
13136 && ( flags == rhs.flags )
13137 && ( queueFamilyIndex == rhs.queueFamilyIndex );
13138 }
13139
13140 bool operator!=( CommandPoolCreateInfo const& rhs ) const
13141 {
13142 return !operator==( rhs );
13143 }
13144
13145 private:
13146 StructureType sType;
13147
13148 public:
13149 const void* pNext;
13150 CommandPoolCreateFlags flags;
13151 uint32_t queueFamilyIndex;
13152 };
13153 static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
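  // Usage sketch (assumption): creating a command pool for a known queue family through the
  // C entry point. "device" and "graphicsQueueFamilyIndex" are hypothetical variables assumed
  // to exist; the cast relies on the layout assert directly above.
  //
  //   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
  //                                       graphicsQueueFamilyIndex );
  //   VkCommandPool pool;
  //   vkCreateCommandPool( device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &poolInfo ),
  //                        nullptr, &pool );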
13154
13155 enum class CommandPoolResetFlagBits
13156 {
13157 eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
13158 };
13159
13160 using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
13161
13162   VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
13163   {
13164 return CommandPoolResetFlags( bit0 ) | bit1;
13165 }
13166
13167   VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
13168 {
13169 return ~( CommandPoolResetFlags( bits ) );
13170 }
13171
13172 template <> struct FlagTraits<CommandPoolResetFlagBits>
13173 {
13174 enum
13175 {
13176 allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
13177 };
13178 };
13179
13180   enum class CommandBufferResetFlagBits
13181 {
13182 eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
13183 };
13184
13185 using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
13186
13187   VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
13188   {
13189 return CommandBufferResetFlags( bit0 ) | bit1;
13190 }
13191
13192   VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
13193 {
13194 return ~( CommandBufferResetFlags( bits ) );
13195 }
13196
13197 template <> struct FlagTraits<CommandBufferResetFlagBits>
13198 {
13199 enum
13200 {
13201 allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
13202 };
13203 };
13204
13205   enum class SampleCountFlagBits
13206 {
13207 e1 = VK_SAMPLE_COUNT_1_BIT,
13208 e2 = VK_SAMPLE_COUNT_2_BIT,
13209 e4 = VK_SAMPLE_COUNT_4_BIT,
13210 e8 = VK_SAMPLE_COUNT_8_BIT,
13211 e16 = VK_SAMPLE_COUNT_16_BIT,
13212 e32 = VK_SAMPLE_COUNT_32_BIT,
13213 e64 = VK_SAMPLE_COUNT_64_BIT
13214 };
13215
13216 using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
13217
13218   VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
13219   {
13220 return SampleCountFlags( bit0 ) | bit1;
13221 }
13222
13223   VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits )
13224 {
13225 return ~( SampleCountFlags( bits ) );
13226 }
13227
13228 template <> struct FlagTraits<SampleCountFlagBits>
13229 {
13230 enum
13231 {
13232 allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
13233 };
13234 };
13235
13236   struct ImageFormatProperties
13237 {
13238 operator const VkImageFormatProperties&() const
13239 {
13240 return *reinterpret_cast<const VkImageFormatProperties*>(this);
13241 }
13242
13243 bool operator==( ImageFormatProperties const& rhs ) const
13244 {
13245 return ( maxExtent == rhs.maxExtent )
13246 && ( maxMipLevels == rhs.maxMipLevels )
13247 && ( maxArrayLayers == rhs.maxArrayLayers )
13248 && ( sampleCounts == rhs.sampleCounts )
13249 && ( maxResourceSize == rhs.maxResourceSize );
13250 }
13251
13252 bool operator!=( ImageFormatProperties const& rhs ) const
13253 {
13254 return !operator==( rhs );
13255 }
13256
13257 Extent3D maxExtent;
13258 uint32_t maxMipLevels;
13259 uint32_t maxArrayLayers;
13260 SampleCountFlags sampleCounts;
13261 DeviceSize maxResourceSize;
13262 };
13263 static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
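  // Usage sketch (assumption): querying the per-configuration limits and testing the returned
  // sample-count mask with the flag types defined above. "physicalDevice" is a hypothetical
  // handle assumed to be valid, and the VkResult of the query should be checked in real code.
  //
  //   vk::ImageFormatProperties props;
  //   vkGetPhysicalDeviceImageFormatProperties( physicalDevice,
  //                                             VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_TYPE_2D,
  //                                             VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_SAMPLED_BIT, 0,
  //                                             reinterpret_cast<VkImageFormatProperties*>( &props ) );
  //   bool msaa4xSupported = bool( props.sampleCounts & vk::SampleCountFlagBits::e4 );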
13264
13265 struct ImageCreateInfo
13266 {
13267 ImageCreateInfo( ImageCreateFlags flags_ = ImageCreateFlags(), ImageType imageType_ = ImageType::e1D, Format format_ = Format::eUndefined, Extent3D extent_ = Extent3D(), uint32_t mipLevels_ = 0, uint32_t arrayLayers_ = 0, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, ImageLayout initialLayout_ = ImageLayout::eUndefined )
13268 : sType( StructureType::eImageCreateInfo )
13269 , pNext( nullptr )
13270 , flags( flags_ )
13271 , imageType( imageType_ )
13272 , format( format_ )
13273 , extent( extent_ )
13274 , mipLevels( mipLevels_ )
13275 , arrayLayers( arrayLayers_ )
13276 , samples( samples_ )
13277 , tiling( tiling_ )
13278 , usage( usage_ )
13279 , sharingMode( sharingMode_ )
13280 , queueFamilyIndexCount( queueFamilyIndexCount_ )
13281 , pQueueFamilyIndices( pQueueFamilyIndices_ )
13282 , initialLayout( initialLayout_ )
13283 {
13284 }
13285
13286 ImageCreateInfo( VkImageCreateInfo const & rhs )
13287 {
13288 memcpy( this, &rhs, sizeof(ImageCreateInfo) );
13289 }
13290
13291 ImageCreateInfo& operator=( VkImageCreateInfo const & rhs )
13292 {
13293 memcpy( this, &rhs, sizeof(ImageCreateInfo) );
13294 return *this;
13295 }
13296
13297 ImageCreateInfo& setSType( StructureType sType_ )
13298 {
13299 sType = sType_;
13300 return *this;
13301 }
13302
13303 ImageCreateInfo& setPNext( const void* pNext_ )
13304 {
13305 pNext = pNext_;
13306 return *this;
13307 }
13308
13309 ImageCreateInfo& setFlags( ImageCreateFlags flags_ )
13310 {
13311 flags = flags_;
13312 return *this;
13313 }
13314
13315 ImageCreateInfo& setImageType( ImageType imageType_ )
13316 {
13317 imageType = imageType_;
13318 return *this;
13319 }
13320
13321 ImageCreateInfo& setFormat( Format format_ )
13322 {
13323 format = format_;
13324 return *this;
13325 }
13326
13327 ImageCreateInfo& setExtent( Extent3D extent_ )
13328 {
13329 extent = extent_;
13330 return *this;
13331 }
13332
13333 ImageCreateInfo& setMipLevels( uint32_t mipLevels_ )
13334 {
13335 mipLevels = mipLevels_;
13336 return *this;
13337 }
13338
13339 ImageCreateInfo& setArrayLayers( uint32_t arrayLayers_ )
13340 {
13341 arrayLayers = arrayLayers_;
13342 return *this;
13343 }
13344
13345 ImageCreateInfo& setSamples( SampleCountFlagBits samples_ )
13346 {
13347 samples = samples_;
13348 return *this;
13349 }
13350
13351 ImageCreateInfo& setTiling( ImageTiling tiling_ )
13352 {
13353 tiling = tiling_;
13354 return *this;
13355 }
13356
13357 ImageCreateInfo& setUsage( ImageUsageFlags usage_ )
13358 {
13359 usage = usage_;
13360 return *this;
13361 }
13362
13363 ImageCreateInfo& setSharingMode( SharingMode sharingMode_ )
13364 {
13365 sharingMode = sharingMode_;
13366 return *this;
13367 }
13368
13369 ImageCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
13370 {
13371 queueFamilyIndexCount = queueFamilyIndexCount_;
13372 return *this;
13373 }
13374
13375 ImageCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
13376 {
13377 pQueueFamilyIndices = pQueueFamilyIndices_;
13378 return *this;
13379 }
13380
13381 ImageCreateInfo& setInitialLayout( ImageLayout initialLayout_ )
13382 {
13383 initialLayout = initialLayout_;
13384 return *this;
13385 }
13386
13387 operator const VkImageCreateInfo&() const
13388 {
13389 return *reinterpret_cast<const VkImageCreateInfo*>(this);
13390 }
13391
13392 bool operator==( ImageCreateInfo const& rhs ) const
13393 {
13394 return ( sType == rhs.sType )
13395 && ( pNext == rhs.pNext )
13396 && ( flags == rhs.flags )
13397 && ( imageType == rhs.imageType )
13398 && ( format == rhs.format )
13399 && ( extent == rhs.extent )
13400 && ( mipLevels == rhs.mipLevels )
13401 && ( arrayLayers == rhs.arrayLayers )
13402 && ( samples == rhs.samples )
13403 && ( tiling == rhs.tiling )
13404 && ( usage == rhs.usage )
13405 && ( sharingMode == rhs.sharingMode )
13406 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
13407 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
13408 && ( initialLayout == rhs.initialLayout );
13409 }
13410
13411 bool operator!=( ImageCreateInfo const& rhs ) const
13412 {
13413 return !operator==( rhs );
13414 }
13415
13416 private:
13417 StructureType sType;
13418
13419 public:
13420 const void* pNext;
13421 ImageCreateFlags flags;
13422 ImageType imageType;
13423 Format format;
13424 Extent3D extent;
13425 uint32_t mipLevels;
13426 uint32_t arrayLayers;
13427 SampleCountFlagBits samples;
13428 ImageTiling tiling;
13429 ImageUsageFlags usage;
13430 SharingMode sharingMode;
13431 uint32_t queueFamilyIndexCount;
13432 const uint32_t* pQueueFamilyIndices;
13433 ImageLayout initialLayout;
13434 };
13435 static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
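  // Usage sketch (assumption): a 2D sampled/transfer-destination image described with the
  // chained setters above; "device" is a hypothetical handle and the extent is arbitrary.
  // Members not set explicitly keep the constructor defaults (1 sample, optimal tiling,
  // exclusive sharing, undefined initial layout).
  //
  //   vk::ImageCreateInfo imageInfo;
  //   imageInfo.setImageType( vk::ImageType::e2D )
  //            .setFormat( vk::Format::eR8G8B8A8Unorm )
  //            .setExtent( vk::Extent3D( 1024, 1024, 1 ) )
  //            .setMipLevels( 1 )
  //            .setArrayLayers( 1 )
  //            .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst );
  //   VkImage image;
  //   vkCreateImage( device, reinterpret_cast<const VkImageCreateInfo*>( &imageInfo ), nullptr, &image );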
13436
13437 struct PipelineMultisampleStateCreateInfo
13438 {
13439 PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = PipelineMultisampleStateCreateFlags(), SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, Bool32 sampleShadingEnable_ = 0, float minSampleShading_ = 0, const SampleMask* pSampleMask_ = nullptr, Bool32 alphaToCoverageEnable_ = 0, Bool32 alphaToOneEnable_ = 0 )
13440 : sType( StructureType::ePipelineMultisampleStateCreateInfo )
13441 , pNext( nullptr )
13442 , flags( flags_ )
13443 , rasterizationSamples( rasterizationSamples_ )
13444 , sampleShadingEnable( sampleShadingEnable_ )
13445 , minSampleShading( minSampleShading_ )
13446 , pSampleMask( pSampleMask_ )
13447 , alphaToCoverageEnable( alphaToCoverageEnable_ )
13448 , alphaToOneEnable( alphaToOneEnable_ )
13449 {
13450 }
13451
13452 PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs )
13453 {
13454 memcpy( this, &rhs, sizeof(PipelineMultisampleStateCreateInfo) );
13455 }
13456
13457 PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs )
13458 {
13459 memcpy( this, &rhs, sizeof(PipelineMultisampleStateCreateInfo) );
13460 return *this;
13461 }
13462
13463 PipelineMultisampleStateCreateInfo& setSType( StructureType sType_ )
13464 {
13465 sType = sType_;
13466 return *this;
13467 }
13468
13469 PipelineMultisampleStateCreateInfo& setPNext( const void* pNext_ )
13470 {
13471 pNext = pNext_;
13472 return *this;
13473 }
13474
13475 PipelineMultisampleStateCreateInfo& setFlags( PipelineMultisampleStateCreateFlags flags_ )
13476 {
13477 flags = flags_;
13478 return *this;
13479 }
13480
13481 PipelineMultisampleStateCreateInfo& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ )
13482 {
13483 rasterizationSamples = rasterizationSamples_;
13484 return *this;
13485 }
13486
13487 PipelineMultisampleStateCreateInfo& setSampleShadingEnable( Bool32 sampleShadingEnable_ )
13488 {
13489 sampleShadingEnable = sampleShadingEnable_;
13490 return *this;
13491 }
13492
13493 PipelineMultisampleStateCreateInfo& setMinSampleShading( float minSampleShading_ )
13494 {
13495 minSampleShading = minSampleShading_;
13496 return *this;
13497 }
13498
13499 PipelineMultisampleStateCreateInfo& setPSampleMask( const SampleMask* pSampleMask_ )
13500 {
13501 pSampleMask = pSampleMask_;
13502 return *this;
13503 }
13504
13505 PipelineMultisampleStateCreateInfo& setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ )
13506 {
13507 alphaToCoverageEnable = alphaToCoverageEnable_;
13508 return *this;
13509 }
13510
13511 PipelineMultisampleStateCreateInfo& setAlphaToOneEnable( Bool32 alphaToOneEnable_ )
13512 {
13513 alphaToOneEnable = alphaToOneEnable_;
13514 return *this;
13515 }
13516
13517 operator const VkPipelineMultisampleStateCreateInfo&() const
13518 {
13519 return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>(this);
13520 }
13521
13522 bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const
13523 {
13524 return ( sType == rhs.sType )
13525 && ( pNext == rhs.pNext )
13526 && ( flags == rhs.flags )
13527 && ( rasterizationSamples == rhs.rasterizationSamples )
13528 && ( sampleShadingEnable == rhs.sampleShadingEnable )
13529 && ( minSampleShading == rhs.minSampleShading )
13530 && ( pSampleMask == rhs.pSampleMask )
13531 && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
13532 && ( alphaToOneEnable == rhs.alphaToOneEnable );
13533 }
13534
13535 bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const
13536 {
13537 return !operator==( rhs );
13538 }
13539
13540 private:
13541 StructureType sType;
13542
13543 public:
13544 const void* pNext;
13545 PipelineMultisampleStateCreateFlags flags;
13546 SampleCountFlagBits rasterizationSamples;
13547 Bool32 sampleShadingEnable;
13548 float minSampleShading;
13549 const SampleMask* pSampleMask;
13550 Bool32 alphaToCoverageEnable;
13551 Bool32 alphaToOneEnable;
13552 };
13553 static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
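  // Usage sketch (assumption): the common "no multisampling" pipeline state. Every other
  // member keeps the constructor defaults, so only the rasterization sample count is spelled
  // out here before the struct is pointed to from a GraphicsPipelineCreateInfo.
  //
  //   vk::PipelineMultisampleStateCreateInfo multisampleState;
  //   multisampleState.setRasterizationSamples( vk::SampleCountFlagBits::e1 );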
13554
13555 struct GraphicsPipelineCreateInfo
13556 {
13557 GraphicsPipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), uint32_t stageCount_ = 0, const PipelineShaderStageCreateInfo* pStages_ = nullptr, const PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr, const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr, const PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr, const PipelineViewportStateCreateInfo* pViewportState_ = nullptr, const PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr, const PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr, const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr, const PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr, const PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr, PipelineLayout layout_ = PipelineLayout(), RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
13558 : sType( StructureType::eGraphicsPipelineCreateInfo )
13559 , pNext( nullptr )
13560 , flags( flags_ )
13561 , stageCount( stageCount_ )
13562 , pStages( pStages_ )
13563 , pVertexInputState( pVertexInputState_ )
13564 , pInputAssemblyState( pInputAssemblyState_ )
13565 , pTessellationState( pTessellationState_ )
13566 , pViewportState( pViewportState_ )
13567 , pRasterizationState( pRasterizationState_ )
13568 , pMultisampleState( pMultisampleState_ )
13569 , pDepthStencilState( pDepthStencilState_ )
13570 , pColorBlendState( pColorBlendState_ )
13571 , pDynamicState( pDynamicState_ )
13572 , layout( layout_ )
13573 , renderPass( renderPass_ )
13574 , subpass( subpass_ )
13575 , basePipelineHandle( basePipelineHandle_ )
13576 , basePipelineIndex( basePipelineIndex_ )
13577 {
13578 }
13579
13580 GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs )
13581 {
13582 memcpy( this, &rhs, sizeof(GraphicsPipelineCreateInfo) );
13583 }
13584
13585 GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs )
13586 {
13587 memcpy( this, &rhs, sizeof(GraphicsPipelineCreateInfo) );
13588 return *this;
13589 }
13590
13591 GraphicsPipelineCreateInfo& setSType( StructureType sType_ )
13592 {
13593 sType = sType_;
13594 return *this;
13595 }
13596
13597 GraphicsPipelineCreateInfo& setPNext( const void* pNext_ )
13598 {
13599 pNext = pNext_;
13600 return *this;
13601 }
13602
13603 GraphicsPipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
13604 {
13605 flags = flags_;
13606 return *this;
13607 }
13608
13609 GraphicsPipelineCreateInfo& setStageCount( uint32_t stageCount_ )
13610 {
13611 stageCount = stageCount_;
13612 return *this;
13613 }
13614
13615 GraphicsPipelineCreateInfo& setPStages( const PipelineShaderStageCreateInfo* pStages_ )
13616 {
13617 pStages = pStages_;
13618 return *this;
13619 }
13620
13621 GraphicsPipelineCreateInfo& setPVertexInputState( const PipelineVertexInputStateCreateInfo* pVertexInputState_ )
13622 {
13623 pVertexInputState = pVertexInputState_;
13624 return *this;
13625 }
13626
13627 GraphicsPipelineCreateInfo& setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ )
13628 {
13629 pInputAssemblyState = pInputAssemblyState_;
13630 return *this;
13631 }
13632
13633 GraphicsPipelineCreateInfo& setPTessellationState( const PipelineTessellationStateCreateInfo* pTessellationState_ )
13634 {
13635 pTessellationState = pTessellationState_;
13636 return *this;
13637 }
13638
13639 GraphicsPipelineCreateInfo& setPViewportState( const PipelineViewportStateCreateInfo* pViewportState_ )
13640 {
13641 pViewportState = pViewportState_;
13642 return *this;
13643 }
13644
13645 GraphicsPipelineCreateInfo& setPRasterizationState( const PipelineRasterizationStateCreateInfo* pRasterizationState_ )
13646 {
13647 pRasterizationState = pRasterizationState_;
13648 return *this;
13649 }
13650
13651 GraphicsPipelineCreateInfo& setPMultisampleState( const PipelineMultisampleStateCreateInfo* pMultisampleState_ )
13652 {
13653 pMultisampleState = pMultisampleState_;
13654 return *this;
13655 }
13656
13657 GraphicsPipelineCreateInfo& setPDepthStencilState( const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ )
13658 {
13659 pDepthStencilState = pDepthStencilState_;
13660 return *this;
13661 }
13662
13663 GraphicsPipelineCreateInfo& setPColorBlendState( const PipelineColorBlendStateCreateInfo* pColorBlendState_ )
13664 {
13665 pColorBlendState = pColorBlendState_;
13666 return *this;
13667 }
13668
13669 GraphicsPipelineCreateInfo& setPDynamicState( const PipelineDynamicStateCreateInfo* pDynamicState_ )
13670 {
13671 pDynamicState = pDynamicState_;
13672 return *this;
13673 }
13674
13675 GraphicsPipelineCreateInfo& setLayout( PipelineLayout layout_ )
13676 {
13677 layout = layout_;
13678 return *this;
13679 }
13680
13681 GraphicsPipelineCreateInfo& setRenderPass( RenderPass renderPass_ )
13682 {
13683 renderPass = renderPass_;
13684 return *this;
13685 }
13686
13687 GraphicsPipelineCreateInfo& setSubpass( uint32_t subpass_ )
13688 {
13689 subpass = subpass_;
13690 return *this;
13691 }
13692
13693 GraphicsPipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
13694 {
13695 basePipelineHandle = basePipelineHandle_;
13696 return *this;
13697 }
13698
13699 GraphicsPipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
13700 {
13701 basePipelineIndex = basePipelineIndex_;
13702 return *this;
13703 }
13704
13705 operator const VkGraphicsPipelineCreateInfo&() const
13706 {
13707 return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>(this);
13708 }
13709
13710 bool operator==( GraphicsPipelineCreateInfo const& rhs ) const
13711 {
13712 return ( sType == rhs.sType )
13713 && ( pNext == rhs.pNext )
13714 && ( flags == rhs.flags )
13715 && ( stageCount == rhs.stageCount )
13716 && ( pStages == rhs.pStages )
13717 && ( pVertexInputState == rhs.pVertexInputState )
13718 && ( pInputAssemblyState == rhs.pInputAssemblyState )
13719 && ( pTessellationState == rhs.pTessellationState )
13720 && ( pViewportState == rhs.pViewportState )
13721 && ( pRasterizationState == rhs.pRasterizationState )
13722 && ( pMultisampleState == rhs.pMultisampleState )
13723 && ( pDepthStencilState == rhs.pDepthStencilState )
13724 && ( pColorBlendState == rhs.pColorBlendState )
13725 && ( pDynamicState == rhs.pDynamicState )
13726 && ( layout == rhs.layout )
13727 && ( renderPass == rhs.renderPass )
13728 && ( subpass == rhs.subpass )
13729 && ( basePipelineHandle == rhs.basePipelineHandle )
13730 && ( basePipelineIndex == rhs.basePipelineIndex );
13731 }
13732
13733 bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const
13734 {
13735 return !operator==( rhs );
13736 }
13737
13738 private:
13739 StructureType sType;
13740
13741 public:
13742 const void* pNext;
13743 PipelineCreateFlags flags;
13744 uint32_t stageCount;
13745 const PipelineShaderStageCreateInfo* pStages;
13746 const PipelineVertexInputStateCreateInfo* pVertexInputState;
13747 const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
13748 const PipelineTessellationStateCreateInfo* pTessellationState;
13749 const PipelineViewportStateCreateInfo* pViewportState;
13750 const PipelineRasterizationStateCreateInfo* pRasterizationState;
13751 const PipelineMultisampleStateCreateInfo* pMultisampleState;
13752 const PipelineDepthStencilStateCreateInfo* pDepthStencilState;
13753 const PipelineColorBlendStateCreateInfo* pColorBlendState;
13754 const PipelineDynamicStateCreateInfo* pDynamicState;
13755 PipelineLayout layout;
13756 RenderPass renderPass;
13757 uint32_t subpass;
13758 Pipeline basePipelineHandle;
13759 int32_t basePipelineIndex;
13760 };
13761 static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
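  // Usage sketch (assumption): the create info only stores pointers to the per-stage state
  // blocks, so those objects must outlive the vkCreateGraphicsPipelines call. The shader
  // stages, state structs, "pipelineLayout", "renderPass" and "device" are hypothetical
  // variables assumed to have been filled in elsewhere.
  //
  //   vk::GraphicsPipelineCreateInfo pipelineInfo;
  //   pipelineInfo.setStageCount( 2 )
  //               .setPStages( shaderStages )                  // PipelineShaderStageCreateInfo[2]
  //               .setPVertexInputState( &vertexInputState )
  //               .setPInputAssemblyState( &inputAssemblyState )
  //               .setPViewportState( &viewportState )
  //               .setPRasterizationState( &rasterizationState )
  //               .setPMultisampleState( &multisampleState )
  //               .setPColorBlendState( &colorBlendState )
  //               .setLayout( pipelineLayout )
  //               .setRenderPass( renderPass );
  //   VkPipeline pipeline;
  //   vkCreateGraphicsPipelines( device, VK_NULL_HANDLE, 1,
  //                              reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &pipelineInfo ),
  //                              nullptr, &pipeline );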
13762
13763 struct PhysicalDeviceLimits
13764 {
13765 operator const VkPhysicalDeviceLimits&() const
13766 {
13767 return *reinterpret_cast<const VkPhysicalDeviceLimits*>(this);
13768 }
13769
13770 bool operator==( PhysicalDeviceLimits const& rhs ) const
13771 {
13772 return ( maxImageDimension1D == rhs.maxImageDimension1D )
13773 && ( maxImageDimension2D == rhs.maxImageDimension2D )
13774 && ( maxImageDimension3D == rhs.maxImageDimension3D )
13775 && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
13776 && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
13777 && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
13778 && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
13779 && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
13780 && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
13781 && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
13782 && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
13783 && ( bufferImageGranularity == rhs.bufferImageGranularity )
13784 && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
13785 && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
13786 && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
13787 && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
13788 && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
13789 && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
13790 && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
13791 && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
13792 && ( maxPerStageResources == rhs.maxPerStageResources )
13793 && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
13794 && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
13795 && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
13796 && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
13797 && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
13798 && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
13799 && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
13800 && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
13801 && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
13802 && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
13803 && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
13804 && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
13805 && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
13806 && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
13807 && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
13808 && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
13809 && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
13810 && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
13811 && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
13812 && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
13813 && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
13814 && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
13815 && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
13816 && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
13817 && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
13818 && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
13819 && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
13820 && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
13821 && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
13822 && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
13823 && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
13824 && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
13825 && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
13826 && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
13827 && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
13828 && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
13829 && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
13830 && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
13831 && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
13832 && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
13833 && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
13834 && ( maxViewports == rhs.maxViewports )
13835 && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
13836 && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
13837 && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
13838 && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
13839 && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
13840 && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
13841 && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
13842 && ( minTexelOffset == rhs.minTexelOffset )
13843 && ( maxTexelOffset == rhs.maxTexelOffset )
13844 && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
13845 && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
13846 && ( minInterpolationOffset == rhs.minInterpolationOffset )
13847 && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
13848 && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
13849 && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
13850 && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
13851 && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
13852 && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
13853 && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
13854 && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
13855 && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
13856 && ( maxColorAttachments == rhs.maxColorAttachments )
13857 && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
13858 && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
13859 && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
13860 && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
13861 && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
13862 && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
13863 && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
13864 && ( timestampPeriod == rhs.timestampPeriod )
13865 && ( maxClipDistances == rhs.maxClipDistances )
13866 && ( maxCullDistances == rhs.maxCullDistances )
13867 && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
13868 && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
13869 && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
13870 && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
13871 && ( pointSizeGranularity == rhs.pointSizeGranularity )
13872 && ( lineWidthGranularity == rhs.lineWidthGranularity )
13873 && ( strictLines == rhs.strictLines )
13874 && ( standardSampleLocations == rhs.standardSampleLocations )
13875 && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
13876 && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
13877 && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
13878 }
13879
13880 bool operator!=( PhysicalDeviceLimits const& rhs ) const
13881 {
13882 return !operator==( rhs );
13883 }
13884
13885 uint32_t maxImageDimension1D;
13886 uint32_t maxImageDimension2D;
13887 uint32_t maxImageDimension3D;
13888 uint32_t maxImageDimensionCube;
13889 uint32_t maxImageArrayLayers;
13890 uint32_t maxTexelBufferElements;
13891 uint32_t maxUniformBufferRange;
13892 uint32_t maxStorageBufferRange;
13893 uint32_t maxPushConstantsSize;
13894 uint32_t maxMemoryAllocationCount;
13895 uint32_t maxSamplerAllocationCount;
13896 DeviceSize bufferImageGranularity;
13897 DeviceSize sparseAddressSpaceSize;
13898 uint32_t maxBoundDescriptorSets;
13899 uint32_t maxPerStageDescriptorSamplers;
13900 uint32_t maxPerStageDescriptorUniformBuffers;
13901 uint32_t maxPerStageDescriptorStorageBuffers;
13902 uint32_t maxPerStageDescriptorSampledImages;
13903 uint32_t maxPerStageDescriptorStorageImages;
13904 uint32_t maxPerStageDescriptorInputAttachments;
13905 uint32_t maxPerStageResources;
13906 uint32_t maxDescriptorSetSamplers;
13907 uint32_t maxDescriptorSetUniformBuffers;
13908 uint32_t maxDescriptorSetUniformBuffersDynamic;
13909 uint32_t maxDescriptorSetStorageBuffers;
13910 uint32_t maxDescriptorSetStorageBuffersDynamic;
13911 uint32_t maxDescriptorSetSampledImages;
13912 uint32_t maxDescriptorSetStorageImages;
13913 uint32_t maxDescriptorSetInputAttachments;
13914 uint32_t maxVertexInputAttributes;
13915 uint32_t maxVertexInputBindings;
13916 uint32_t maxVertexInputAttributeOffset;
13917 uint32_t maxVertexInputBindingStride;
13918 uint32_t maxVertexOutputComponents;
13919 uint32_t maxTessellationGenerationLevel;
13920 uint32_t maxTessellationPatchSize;
13921 uint32_t maxTessellationControlPerVertexInputComponents;
13922 uint32_t maxTessellationControlPerVertexOutputComponents;
13923 uint32_t maxTessellationControlPerPatchOutputComponents;
13924 uint32_t maxTessellationControlTotalOutputComponents;
13925 uint32_t maxTessellationEvaluationInputComponents;
13926 uint32_t maxTessellationEvaluationOutputComponents;
13927 uint32_t maxGeometryShaderInvocations;
13928 uint32_t maxGeometryInputComponents;
13929 uint32_t maxGeometryOutputComponents;
13930 uint32_t maxGeometryOutputVertices;
13931 uint32_t maxGeometryTotalOutputComponents;
13932 uint32_t maxFragmentInputComponents;
13933 uint32_t maxFragmentOutputAttachments;
13934 uint32_t maxFragmentDualSrcAttachments;
13935 uint32_t maxFragmentCombinedOutputResources;
13936 uint32_t maxComputeSharedMemorySize;
13937 uint32_t maxComputeWorkGroupCount[3];
13938 uint32_t maxComputeWorkGroupInvocations;
13939 uint32_t maxComputeWorkGroupSize[3];
13940 uint32_t subPixelPrecisionBits;
13941 uint32_t subTexelPrecisionBits;
13942 uint32_t mipmapPrecisionBits;
13943 uint32_t maxDrawIndexedIndexValue;
13944 uint32_t maxDrawIndirectCount;
13945 float maxSamplerLodBias;
13946 float maxSamplerAnisotropy;
13947 uint32_t maxViewports;
13948 uint32_t maxViewportDimensions[2];
13949 float viewportBoundsRange[2];
13950 uint32_t viewportSubPixelBits;
13951 size_t minMemoryMapAlignment;
13952 DeviceSize minTexelBufferOffsetAlignment;
13953 DeviceSize minUniformBufferOffsetAlignment;
13954 DeviceSize minStorageBufferOffsetAlignment;
13955 int32_t minTexelOffset;
13956 uint32_t maxTexelOffset;
13957 int32_t minTexelGatherOffset;
13958 uint32_t maxTexelGatherOffset;
13959 float minInterpolationOffset;
13960 float maxInterpolationOffset;
13961 uint32_t subPixelInterpolationOffsetBits;
13962 uint32_t maxFramebufferWidth;
13963 uint32_t maxFramebufferHeight;
13964 uint32_t maxFramebufferLayers;
13965 SampleCountFlags framebufferColorSampleCounts;
13966 SampleCountFlags framebufferDepthSampleCounts;
13967 SampleCountFlags framebufferStencilSampleCounts;
13968 SampleCountFlags framebufferNoAttachmentsSampleCounts;
13969 uint32_t maxColorAttachments;
13970 SampleCountFlags sampledImageColorSampleCounts;
13971 SampleCountFlags sampledImageIntegerSampleCounts;
13972 SampleCountFlags sampledImageDepthSampleCounts;
13973 SampleCountFlags sampledImageStencilSampleCounts;
13974 SampleCountFlags storageImageSampleCounts;
13975 uint32_t maxSampleMaskWords;
13976 Bool32 timestampComputeAndGraphics;
13977 float timestampPeriod;
13978 uint32_t maxClipDistances;
13979 uint32_t maxCullDistances;
13980 uint32_t maxCombinedClipAndCullDistances;
13981 uint32_t discreteQueuePriorities;
13982 float pointSizeRange[2];
13983 float lineWidthRange[2];
13984 float pointSizeGranularity;
13985 float lineWidthGranularity;
13986 Bool32 strictLines;
13987 Bool32 standardSampleLocations;
13988 DeviceSize optimalBufferCopyOffsetAlignment;
13989 DeviceSize optimalBufferCopyRowPitchAlignment;
13990 DeviceSize nonCoherentAtomSize;
13991 };
13992 static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
13993
13994 struct PhysicalDeviceProperties
13995 {
13996 operator const VkPhysicalDeviceProperties&() const
13997 {
13998 return *reinterpret_cast<const VkPhysicalDeviceProperties*>(this);
13999 }
14000
14001 bool operator==( PhysicalDeviceProperties const& rhs ) const
14002 {
14003 return ( apiVersion == rhs.apiVersion )
14004 && ( driverVersion == rhs.driverVersion )
14005 && ( vendorID == rhs.vendorID )
14006 && ( deviceID == rhs.deviceID )
14007 && ( deviceType == rhs.deviceType )
14008 && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
14009 && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
14010 && ( limits == rhs.limits )
14011 && ( sparseProperties == rhs.sparseProperties );
14012 }
14013
14014 bool operator!=( PhysicalDeviceProperties const& rhs ) const
14015 {
14016 return !operator==( rhs );
14017 }
14018
14019 uint32_t apiVersion;
14020 uint32_t driverVersion;
14021 uint32_t vendorID;
14022 uint32_t deviceID;
14023 PhysicalDeviceType deviceType;
14024 char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
14025 uint8_t pipelineCacheUUID[VK_UUID_SIZE];
14026 PhysicalDeviceLimits limits;
14027 PhysicalDeviceSparseProperties sparseProperties;
14028 };
14029 static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
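  // Usage sketch (assumption): the wrapper is layout-compatible with the C struct (see the
  // assert above), so it can receive the result of the C query directly; "physicalDevice"
  // is a hypothetical handle assumed to be valid.
  //
  //   vk::PhysicalDeviceProperties properties;
  //   vkGetPhysicalDeviceProperties( physicalDevice,
  //                                  reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
  //   uint32_t maxPushConstants = properties.limits.maxPushConstantsSize;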
14030
14031 enum class AttachmentDescriptionFlagBits
14032 {
14033 eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
14034 };
14035
14036 using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
14037
14038   VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
14039   {
14040 return AttachmentDescriptionFlags( bit0 ) | bit1;
14041 }
14042
14043   VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
14044 {
14045 return ~( AttachmentDescriptionFlags( bits ) );
14046 }
14047
14048 template <> struct FlagTraits<AttachmentDescriptionFlagBits>
14049 {
14050 enum
14051 {
14052 allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
14053 };
14054 };
14055
14056   struct AttachmentDescription
14057 {
14058 AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined )
14059 : flags( flags_ )
14060 , format( format_ )
14061 , samples( samples_ )
14062 , loadOp( loadOp_ )
14063 , storeOp( storeOp_ )
14064 , stencilLoadOp( stencilLoadOp_ )
14065 , stencilStoreOp( stencilStoreOp_ )
14066 , initialLayout( initialLayout_ )
14067 , finalLayout( finalLayout_ )
14068 {
14069 }
14070
14071 AttachmentDescription( VkAttachmentDescription const & rhs )
14072 {
14073 memcpy( this, &rhs, sizeof(AttachmentDescription) );
14074 }
14075
14076 AttachmentDescription& operator=( VkAttachmentDescription const & rhs )
14077 {
14078 memcpy( this, &rhs, sizeof(AttachmentDescription) );
14079 return *this;
14080 }
14081
14082 AttachmentDescription& setFlags( AttachmentDescriptionFlags flags_ )
14083 {
14084 flags = flags_;
14085 return *this;
14086 }
14087
14088 AttachmentDescription& setFormat( Format format_ )
14089 {
14090 format = format_;
14091 return *this;
14092 }
14093
14094 AttachmentDescription& setSamples( SampleCountFlagBits samples_ )
14095 {
14096 samples = samples_;
14097 return *this;
14098 }
14099
14100 AttachmentDescription& setLoadOp( AttachmentLoadOp loadOp_ )
14101 {
14102 loadOp = loadOp_;
14103 return *this;
14104 }
14105
14106 AttachmentDescription& setStoreOp( AttachmentStoreOp storeOp_ )
14107 {
14108 storeOp = storeOp_;
14109 return *this;
14110 }
14111
14112 AttachmentDescription& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ )
14113 {
14114 stencilLoadOp = stencilLoadOp_;
14115 return *this;
14116 }
14117
14118 AttachmentDescription& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ )
14119 {
14120 stencilStoreOp = stencilStoreOp_;
14121 return *this;
14122 }
14123
14124 AttachmentDescription& setInitialLayout( ImageLayout initialLayout_ )
14125 {
14126 initialLayout = initialLayout_;
14127 return *this;
14128 }
14129
14130 AttachmentDescription& setFinalLayout( ImageLayout finalLayout_ )
14131 {
14132 finalLayout = finalLayout_;
14133 return *this;
14134 }
14135
14136 operator const VkAttachmentDescription&() const
14137 {
14138 return *reinterpret_cast<const VkAttachmentDescription*>(this);
14139 }
14140
14141 bool operator==( AttachmentDescription const& rhs ) const
14142 {
14143 return ( flags == rhs.flags )
14144 && ( format == rhs.format )
14145 && ( samples == rhs.samples )
14146 && ( loadOp == rhs.loadOp )
14147 && ( storeOp == rhs.storeOp )
14148 && ( stencilLoadOp == rhs.stencilLoadOp )
14149 && ( stencilStoreOp == rhs.stencilStoreOp )
14150 && ( initialLayout == rhs.initialLayout )
14151 && ( finalLayout == rhs.finalLayout );
14152 }
14153
14154 bool operator!=( AttachmentDescription const& rhs ) const
14155 {
14156 return !operator==( rhs );
14157 }
14158
14159 AttachmentDescriptionFlags flags;
14160 Format format;
14161 SampleCountFlagBits samples;
14162 AttachmentLoadOp loadOp;
14163 AttachmentStoreOp storeOp;
14164 AttachmentLoadOp stencilLoadOp;
14165 AttachmentStoreOp stencilStoreOp;
14166 ImageLayout initialLayout;
14167 ImageLayout finalLayout;
14168 };
14169 static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
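  // Usage sketch (assumption): a single-sample color attachment that is cleared on load and
  // handed to the presentation engine afterwards; in real code the format would come from
  // the swapchain rather than being hard-coded.
  //
  //   vk::AttachmentDescription colorAttachment;
  //   colorAttachment.setFormat( vk::Format::eB8G8R8A8Unorm )
  //                  .setSamples( vk::SampleCountFlagBits::e1 )
  //                  .setLoadOp( vk::AttachmentLoadOp::eClear )
  //                  .setStoreOp( vk::AttachmentStoreOp::eStore )
  //                  .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
  //                  .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
  //                  .setInitialLayout( vk::ImageLayout::eUndefined )
  //                  .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );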
14170
14171 enum class StencilFaceFlagBits
14172 {
14173 eFront = VK_STENCIL_FACE_FRONT_BIT,
14174 eBack = VK_STENCIL_FACE_BACK_BIT,
14175 eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
14176 };
14177
14178 using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
14179
14180   VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
14181   {
14182 return StencilFaceFlags( bit0 ) | bit1;
14183 }
14184
14185   VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits )
14186 {
14187 return ~( StencilFaceFlags( bits ) );
14188 }
14189
14190 template <> struct FlagTraits<StencilFaceFlagBits>
14191 {
14192 enum
14193 {
14194 allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack)
14195 };
14196 };
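  // Usage sketch (assumption): selecting both stencil faces for a dynamic-state update.
  // "commandBuffer" is a hypothetical handle assumed to be in the recording state; the
  // explicit cast goes through the Flags<> mask conversion.
  //
  //   vk::StencilFaceFlags faces = vk::StencilFaceFlagBits::eFront | vk::StencilFaceFlagBits::eBack;
  //   vkCmdSetStencilReference( commandBuffer, static_cast<VkStencilFaceFlags>( faces ), 0x01 );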
14197
14198   enum class DescriptorPoolCreateFlagBits
14199 {
14200 eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
14201 };
14202
14203 using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
14204
14205   VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
14206   {
14207 return DescriptorPoolCreateFlags( bit0 ) | bit1;
14208 }
14209
14210   VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
14211 {
14212 return ~( DescriptorPoolCreateFlags( bits ) );
14213 }
14214
14215 template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
14216 {
14217 enum
14218 {
14219 allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet)
14220 };
14221 };
14222
14223   struct DescriptorPoolCreateInfo
14224 {
14225 DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), uint32_t maxSets_ = 0, uint32_t poolSizeCount_ = 0, const DescriptorPoolSize* pPoolSizes_ = nullptr )
14226 : sType( StructureType::eDescriptorPoolCreateInfo )
14227 , pNext( nullptr )
14228 , flags( flags_ )
14229 , maxSets( maxSets_ )
14230 , poolSizeCount( poolSizeCount_ )
14231 , pPoolSizes( pPoolSizes_ )
14232 {
14233 }
14234
14235 DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs )
14236 {
14237 memcpy( this, &rhs, sizeof(DescriptorPoolCreateInfo) );
14238 }
14239
14240 DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs )
14241 {
14242 memcpy( this, &rhs, sizeof(DescriptorPoolCreateInfo) );
14243 return *this;
14244 }
14245
14246 DescriptorPoolCreateInfo& setSType( StructureType sType_ )
14247 {
14248 sType = sType_;
14249 return *this;
14250 }
14251
14252 DescriptorPoolCreateInfo& setPNext( const void* pNext_ )
14253 {
14254 pNext = pNext_;
14255 return *this;
14256 }
14257
14258 DescriptorPoolCreateInfo& setFlags( DescriptorPoolCreateFlags flags_ )
14259 {
14260 flags = flags_;
14261 return *this;
14262 }
14263
14264 DescriptorPoolCreateInfo& setMaxSets( uint32_t maxSets_ )
14265 {
14266 maxSets = maxSets_;
14267 return *this;
14268 }
14269
14270 DescriptorPoolCreateInfo& setPoolSizeCount( uint32_t poolSizeCount_ )
14271 {
14272 poolSizeCount = poolSizeCount_;
14273 return *this;
14274 }
14275
14276 DescriptorPoolCreateInfo& setPPoolSizes( const DescriptorPoolSize* pPoolSizes_ )
14277 {
14278 pPoolSizes = pPoolSizes_;
14279 return *this;
14280 }
14281
14282 operator const VkDescriptorPoolCreateInfo&() const
14283 {
14284 return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>(this);
14285 }
14286
14287 bool operator==( DescriptorPoolCreateInfo const& rhs ) const
14288 {
14289 return ( sType == rhs.sType )
14290 && ( pNext == rhs.pNext )
14291 && ( flags == rhs.flags )
14292 && ( maxSets == rhs.maxSets )
14293 && ( poolSizeCount == rhs.poolSizeCount )
14294 && ( pPoolSizes == rhs.pPoolSizes );
14295 }
14296
14297 bool operator!=( DescriptorPoolCreateInfo const& rhs ) const
14298 {
14299 return !operator==( rhs );
14300 }
14301
14302 private:
14303 StructureType sType;
14304
14305 public:
14306 const void* pNext;
14307 DescriptorPoolCreateFlags flags;
14308 uint32_t maxSets;
14309 uint32_t poolSizeCount;
14310 const DescriptorPoolSize* pPoolSizes;
14311 };
14312 static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
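  // A minimal usage sketch: the fluent setters above are typically chained to populate the
  // create-info. This assumes a valid vk::Device named "device" and enhanced mode
  // (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined) for the createDescriptorPool helper.
  //
  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
  //   vk::DescriptorPoolCreateInfo poolInfo = vk::DescriptorPoolCreateInfo()
  //     .setFlags( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
  //     .setMaxSets( 16 )
  //     .setPoolSizeCount( 1 )
  //     .setPPoolSizes( &poolSize );
  //   vk::DescriptorPool pool = device.createDescriptorPool( poolInfo );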
14313
14314 enum class DependencyFlagBits
14315 {
14316 eByRegion = VK_DEPENDENCY_BY_REGION_BIT
14317 };
14318
14319 using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
14320
14321   VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
14322   {
14323 return DependencyFlags( bit0 ) | bit1;
14324 }
14325
14326   VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
14327 {
14328 return ~( DependencyFlags( bits ) );
14329 }
14330
14331 template <> struct FlagTraits<DependencyFlagBits>
14332 {
14333 enum
14334 {
14335 allFlags = VkFlags(DependencyFlagBits::eByRegion)
14336 };
14337 };
14338
14339 struct SubpassDependency
14340 {
14341 SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
14342 : srcSubpass( srcSubpass_ )
14343 , dstSubpass( dstSubpass_ )
14344 , srcStageMask( srcStageMask_ )
14345 , dstStageMask( dstStageMask_ )
14346 , srcAccessMask( srcAccessMask_ )
14347 , dstAccessMask( dstAccessMask_ )
14348 , dependencyFlags( dependencyFlags_ )
14349 {
14350 }
14351
14352 SubpassDependency( VkSubpassDependency const & rhs )
14353 {
14354 memcpy( this, &rhs, sizeof(SubpassDependency) );
14355 }
14356
14357 SubpassDependency& operator=( VkSubpassDependency const & rhs )
14358 {
14359 memcpy( this, &rhs, sizeof(SubpassDependency) );
14360 return *this;
14361 }
14362
14363 SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
14364 {
14365 srcSubpass = srcSubpass_;
14366 return *this;
14367 }
14368
14369 SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
14370 {
14371 dstSubpass = dstSubpass_;
14372 return *this;
14373 }
14374
14375 SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
14376 {
14377 srcStageMask = srcStageMask_;
14378 return *this;
14379 }
14380
14381 SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
14382 {
14383 dstStageMask = dstStageMask_;
14384 return *this;
14385 }
14386
14387 SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
14388 {
14389 srcAccessMask = srcAccessMask_;
14390 return *this;
14391 }
14392
14393 SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
14394 {
14395 dstAccessMask = dstAccessMask_;
14396 return *this;
14397 }
14398
14399 SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
14400 {
14401 dependencyFlags = dependencyFlags_;
14402 return *this;
14403 }
14404
14405 operator const VkSubpassDependency&() const
14406 {
14407 return *reinterpret_cast<const VkSubpassDependency*>(this);
14408 }
14409
14410 bool operator==( SubpassDependency const& rhs ) const
14411 {
14412 return ( srcSubpass == rhs.srcSubpass )
14413 && ( dstSubpass == rhs.dstSubpass )
14414 && ( srcStageMask == rhs.srcStageMask )
14415 && ( dstStageMask == rhs.dstStageMask )
14416 && ( srcAccessMask == rhs.srcAccessMask )
14417 && ( dstAccessMask == rhs.dstAccessMask )
14418 && ( dependencyFlags == rhs.dependencyFlags );
14419 }
14420
14421 bool operator!=( SubpassDependency const& rhs ) const
14422 {
14423 return !operator==( rhs );
14424 }
14425
14426 uint32_t srcSubpass;
14427 uint32_t dstSubpass;
14428 PipelineStageFlags srcStageMask;
14429 PipelineStageFlags dstStageMask;
14430 AccessFlags srcAccessMask;
14431 AccessFlags dstAccessMask;
14432 DependencyFlags dependencyFlags;
14433 };
14434 static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
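  // A minimal usage sketch: a common dependency from the implicit external subpass into
  // subpass 0. VK_SUBPASS_EXTERNAL comes from the C header; the stage and access masks shown
  // here are only one possible choice.
  //
  //   vk::SubpassDependency dependency = vk::SubpassDependency()
  //     .setSrcSubpass( VK_SUBPASS_EXTERNAL )
  //     .setDstSubpass( 0 )
  //     .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //     .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //     .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentRead | vk::AccessFlagBits::eColorAttachmentWrite );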
14435
14436 struct RenderPassCreateInfo
14437 {
14438 RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
14439 : sType( StructureType::eRenderPassCreateInfo )
14440 , pNext( nullptr )
14441 , flags( flags_ )
14442 , attachmentCount( attachmentCount_ )
14443 , pAttachments( pAttachments_ )
14444 , subpassCount( subpassCount_ )
14445 , pSubpasses( pSubpasses_ )
14446 , dependencyCount( dependencyCount_ )
14447 , pDependencies( pDependencies_ )
14448 {
14449 }
14450
14451 RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
14452 {
14453 memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
14454 }
14455
14456 RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
14457 {
14458 memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
14459 return *this;
14460 }
14461
14462 RenderPassCreateInfo& setSType( StructureType sType_ )
14463 {
14464 sType = sType_;
14465 return *this;
14466 }
14467
14468 RenderPassCreateInfo& setPNext( const void* pNext_ )
14469 {
14470 pNext = pNext_;
14471 return *this;
14472 }
14473
14474 RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
14475 {
14476 flags = flags_;
14477 return *this;
14478 }
14479
14480 RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
14481 {
14482 attachmentCount = attachmentCount_;
14483 return *this;
14484 }
14485
14486 RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
14487 {
14488 pAttachments = pAttachments_;
14489 return *this;
14490 }
14491
14492 RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
14493 {
14494 subpassCount = subpassCount_;
14495 return *this;
14496 }
14497
14498 RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
14499 {
14500 pSubpasses = pSubpasses_;
14501 return *this;
14502 }
14503
14504 RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
14505 {
14506 dependencyCount = dependencyCount_;
14507 return *this;
14508 }
14509
14510 RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
14511 {
14512 pDependencies = pDependencies_;
14513 return *this;
14514 }
14515
14516 operator const VkRenderPassCreateInfo&() const
14517 {
14518 return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
14519 }
14520
14521 bool operator==( RenderPassCreateInfo const& rhs ) const
14522 {
14523 return ( sType == rhs.sType )
14524 && ( pNext == rhs.pNext )
14525 && ( flags == rhs.flags )
14526 && ( attachmentCount == rhs.attachmentCount )
14527 && ( pAttachments == rhs.pAttachments )
14528 && ( subpassCount == rhs.subpassCount )
14529 && ( pSubpasses == rhs.pSubpasses )
14530 && ( dependencyCount == rhs.dependencyCount )
14531 && ( pDependencies == rhs.pDependencies );
14532 }
14533
14534 bool operator!=( RenderPassCreateInfo const& rhs ) const
14535 {
14536 return !operator==( rhs );
14537 }
14538
14539 private:
14540 StructureType sType;
14541
14542 public:
14543 const void* pNext;
14544 RenderPassCreateFlags flags;
14545 uint32_t attachmentCount;
14546 const AttachmentDescription* pAttachments;
14547 uint32_t subpassCount;
14548 const SubpassDescription* pSubpasses;
14549 uint32_t dependencyCount;
14550 const SubpassDependency* pDependencies;
14551 };
14552 static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
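  // A minimal usage sketch: assembling one attachment, one subpass and the dependency
  // sketched above into a render pass. The "colorAttachment" and "subpass" objects and the
  // "device" handle are assumed to be set up by the caller; createRenderPass is the
  // enhanced-mode helper.
  //
  //   vk::RenderPassCreateInfo renderPassInfo = vk::RenderPassCreateInfo()
  //     .setAttachmentCount( 1 )
  //     .setPAttachments( &colorAttachment )
  //     .setSubpassCount( 1 )
  //     .setPSubpasses( &subpass )
  //     .setDependencyCount( 1 )
  //     .setPDependencies( &dependency );
  //   vk::RenderPass renderPass = device.createRenderPass( renderPassInfo );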
14553
14554 enum class PresentModeKHR
14555 {
14556 eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
14557 eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
14558 eFifo = VK_PRESENT_MODE_FIFO_KHR,
14559 eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR
14560 };
14561
14562 enum class ColorSpaceKHR
14563 {
14564 eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR
14565 };
14566
14567 struct SurfaceFormatKHR
14568 {
14569 operator const VkSurfaceFormatKHR&() const
14570 {
14571 return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
14572 }
14573
14574 bool operator==( SurfaceFormatKHR const& rhs ) const
14575 {
14576 return ( format == rhs.format )
14577 && ( colorSpace == rhs.colorSpace );
14578 }
14579
14580 bool operator!=( SurfaceFormatKHR const& rhs ) const
14581 {
14582 return !operator==( rhs );
14583 }
14584
14585 Format format;
14586 ColorSpaceKHR colorSpace;
14587 };
14588 static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
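  // A minimal usage sketch: SurfaceFormatKHR values are normally obtained from the
  // implementation rather than constructed by the application, e.g. via the enhanced-mode
  // helper on vk::PhysicalDevice, assuming a valid "surface" handle:
  //
  //   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );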
14589
14590 enum class DisplayPlaneAlphaFlagBitsKHR
14591 {
14592 eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
14593 eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
14594 ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
14595 ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
14596 };
14597
14598 using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
14599
14600 VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
14601 {
14602 return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
14603 }
14604
14605 VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
14606 {
14607 return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
14608 }
14609
14610 template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
14611 {
14612 enum
14613 {
14614 allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
14615 };
14616 };
14617
14618 struct DisplayPlaneCapabilitiesKHR
14619 {
14620 operator const VkDisplayPlaneCapabilitiesKHR&() const
14621 {
14622 return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
14623 }
14624
14625 bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
14626 {
14627 return ( supportedAlpha == rhs.supportedAlpha )
14628 && ( minSrcPosition == rhs.minSrcPosition )
14629 && ( maxSrcPosition == rhs.maxSrcPosition )
14630 && ( minSrcExtent == rhs.minSrcExtent )
14631 && ( maxSrcExtent == rhs.maxSrcExtent )
14632 && ( minDstPosition == rhs.minDstPosition )
14633 && ( maxDstPosition == rhs.maxDstPosition )
14634 && ( minDstExtent == rhs.minDstExtent )
14635 && ( maxDstExtent == rhs.maxDstExtent );
14636 }
14637
14638 bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
14639 {
14640 return !operator==( rhs );
14641 }
14642
14643 DisplayPlaneAlphaFlagsKHR supportedAlpha;
14644 Offset2D minSrcPosition;
14645 Offset2D maxSrcPosition;
14646 Extent2D minSrcExtent;
14647 Extent2D maxSrcExtent;
14648 Offset2D minDstPosition;
14649 Offset2D maxDstPosition;
14650 Extent2D minDstExtent;
14651 Extent2D maxDstExtent;
14652 };
14653 static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
14654
14655 enum class CompositeAlphaFlagBitsKHR
14656 {
14657 eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
14658 ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
14659 ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
14660 eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
14661 };
14662
14663 using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
14664
14665 VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
14666 {
14667 return CompositeAlphaFlagsKHR( bit0 ) | bit1;
14668 }
14669
14670 VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
14671 {
14672 return ~( CompositeAlphaFlagsKHR( bits ) );
14673 }
14674
14675 template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
14676 {
14677 enum
14678 {
14679 allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
14680 };
14681 };
14682
14683 enum class SurfaceTransformFlagBitsKHR
14684 {
14685 eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
14686 eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
14687 eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
14688 eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
14689 eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
14690 eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
14691 eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
14692 eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
14693 eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
14694 };
14695
14696 using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
14697
14698 VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
14699 {
14700 return SurfaceTransformFlagsKHR( bit0 ) | bit1;
14701 }
14702
14703 VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
14704 {
14705 return ~( SurfaceTransformFlagsKHR( bits ) );
14706 }
14707
14708 template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
14709 {
14710 enum
14711 {
14712 allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
14713 };
14714 };
14715
14716 struct DisplayPropertiesKHR
14717 {
14718 operator const VkDisplayPropertiesKHR&() const
14719 {
14720 return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
14721 }
14722
14723 bool operator==( DisplayPropertiesKHR const& rhs ) const
14724 {
14725 return ( display == rhs.display )
14726 && ( displayName == rhs.displayName )
14727 && ( physicalDimensions == rhs.physicalDimensions )
14728 && ( physicalResolution == rhs.physicalResolution )
14729 && ( supportedTransforms == rhs.supportedTransforms )
14730 && ( planeReorderPossible == rhs.planeReorderPossible )
14731 && ( persistentContent == rhs.persistentContent );
14732 }
14733
14734 bool operator!=( DisplayPropertiesKHR const& rhs ) const
14735 {
14736 return !operator==( rhs );
14737 }
14738
14739 DisplayKHR display;
14740 const char* displayName;
14741 Extent2D physicalDimensions;
14742 Extent2D physicalResolution;
14743 SurfaceTransformFlagsKHR supportedTransforms;
14744 Bool32 planeReorderPossible;
14745 Bool32 persistentContent;
14746 };
14747 static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
14748
14749 struct DisplaySurfaceCreateInfoKHR
14750 {
14751 DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
14752 : sType( StructureType::eDisplaySurfaceCreateInfoKHR )
14753 , pNext( nullptr )
14754 , flags( flags_ )
14755 , displayMode( displayMode_ )
14756 , planeIndex( planeIndex_ )
14757 , planeStackIndex( planeStackIndex_ )
14758 , transform( transform_ )
14759 , globalAlpha( globalAlpha_ )
14760 , alphaMode( alphaMode_ )
14761 , imageExtent( imageExtent_ )
14762 {
14763 }
14764
14765 DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
14766 {
14767 memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
14768 }
14769
14770 DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
14771 {
14772 memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
14773 return *this;
14774 }
14775
14776 DisplaySurfaceCreateInfoKHR& setSType( StructureType sType_ )
14777 {
14778 sType = sType_;
14779 return *this;
14780 }
14781
14782 DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
14783 {
14784 pNext = pNext_;
14785 return *this;
14786 }
14787
14788 DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
14789 {
14790 flags = flags_;
14791 return *this;
14792 }
14793
14794 DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
14795 {
14796 displayMode = displayMode_;
14797 return *this;
14798 }
14799
14800 DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
14801 {
14802 planeIndex = planeIndex_;
14803 return *this;
14804 }
14805
14806 DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
14807 {
14808 planeStackIndex = planeStackIndex_;
14809 return *this;
14810 }
14811
14812 DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
14813 {
14814 transform = transform_;
14815 return *this;
14816 }
14817
14818 DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
14819 {
14820 globalAlpha = globalAlpha_;
14821 return *this;
14822 }
14823
14824 DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
14825 {
14826 alphaMode = alphaMode_;
14827 return *this;
14828 }
14829
14830 DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
14831 {
14832 imageExtent = imageExtent_;
14833 return *this;
14834 }
14835
14836 operator const VkDisplaySurfaceCreateInfoKHR&() const
14837 {
14838 return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
14839 }
14840
14841 bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
14842 {
14843 return ( sType == rhs.sType )
14844 && ( pNext == rhs.pNext )
14845 && ( flags == rhs.flags )
14846 && ( displayMode == rhs.displayMode )
14847 && ( planeIndex == rhs.planeIndex )
14848 && ( planeStackIndex == rhs.planeStackIndex )
14849 && ( transform == rhs.transform )
14850 && ( globalAlpha == rhs.globalAlpha )
14851 && ( alphaMode == rhs.alphaMode )
14852 && ( imageExtent == rhs.imageExtent );
14853 }
14854
14855 bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
14856 {
14857 return !operator==( rhs );
14858 }
14859
14860 private:
14861 StructureType sType;
14862
14863 public:
14864 const void* pNext;
14865 DisplaySurfaceCreateFlagsKHR flags;
14866 DisplayModeKHR displayMode;
14867 uint32_t planeIndex;
14868 uint32_t planeStackIndex;
14869 SurfaceTransformFlagBitsKHR transform;
14870 float globalAlpha;
14871 DisplayPlaneAlphaFlagBitsKHR alphaMode;
14872 Extent2D imageExtent;
14873 };
14874 static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
14875
14876 struct SurfaceCapabilitiesKHR
14877 {
14878 operator const VkSurfaceCapabilitiesKHR&() const
14879 {
14880 return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
14881 }
14882
14883 bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
14884 {
14885 return ( minImageCount == rhs.minImageCount )
14886 && ( maxImageCount == rhs.maxImageCount )
14887 && ( currentExtent == rhs.currentExtent )
14888 && ( minImageExtent == rhs.minImageExtent )
14889 && ( maxImageExtent == rhs.maxImageExtent )
14890 && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
14891 && ( supportedTransforms == rhs.supportedTransforms )
14892 && ( currentTransform == rhs.currentTransform )
14893 && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
14894 && ( supportedUsageFlags == rhs.supportedUsageFlags );
14895 }
14896
14897 bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
14898 {
14899 return !operator==( rhs );
14900 }
14901
14902 uint32_t minImageCount;
14903 uint32_t maxImageCount;
14904 Extent2D currentExtent;
14905 Extent2D minImageExtent;
14906 Extent2D maxImageExtent;
14907 uint32_t maxImageArrayLayers;
14908 SurfaceTransformFlagsKHR supportedTransforms;
14909 SurfaceTransformFlagBitsKHR currentTransform;
14910 CompositeAlphaFlagsKHR supportedCompositeAlpha;
14911 ImageUsageFlags supportedUsageFlags;
14912 };
14913 static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
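  // A minimal usage sketch: like SurfaceFormatKHR, SurfaceCapabilitiesKHR is filled in by the
  // implementation; the enhanced-mode helper on vk::PhysicalDevice returns it by value,
  // assuming VK_KHR_surface is enabled and "surface" is a valid handle:
  //
  //   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  //   vk::Extent2D chosenExtent = caps.currentExtent;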
14914
14915 struct SwapchainCreateInfoKHR
14916 {
14917 SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
14918 : sType( StructureType::eSwapchainCreateInfoKHR )
14919 , pNext( nullptr )
14920 , flags( flags_ )
14921 , surface( surface_ )
14922 , minImageCount( minImageCount_ )
14923 , imageFormat( imageFormat_ )
14924 , imageColorSpace( imageColorSpace_ )
14925 , imageExtent( imageExtent_ )
14926 , imageArrayLayers( imageArrayLayers_ )
14927 , imageUsage( imageUsage_ )
14928 , imageSharingMode( imageSharingMode_ )
14929 , queueFamilyIndexCount( queueFamilyIndexCount_ )
14930 , pQueueFamilyIndices( pQueueFamilyIndices_ )
14931 , preTransform( preTransform_ )
14932 , compositeAlpha( compositeAlpha_ )
14933 , presentMode( presentMode_ )
14934 , clipped( clipped_ )
14935 , oldSwapchain( oldSwapchain_ )
14936 {
14937 }
14938
14939 SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
14940 {
14941 memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
14942 }
14943
14944 SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
14945 {
14946 memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
14947 return *this;
14948 }
14949
14950 SwapchainCreateInfoKHR& setSType( StructureType sType_ )
14951 {
14952 sType = sType_;
14953 return *this;
14954 }
14955
14956 SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
14957 {
14958 pNext = pNext_;
14959 return *this;
14960 }
14961
14962 SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
14963 {
14964 flags = flags_;
14965 return *this;
14966 }
14967
14968 SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
14969 {
14970 surface = surface_;
14971 return *this;
14972 }
14973
14974 SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
14975 {
14976 minImageCount = minImageCount_;
14977 return *this;
14978 }
14979
14980 SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
14981 {
14982 imageFormat = imageFormat_;
14983 return *this;
14984 }
14985
14986 SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
14987 {
14988 imageColorSpace = imageColorSpace_;
14989 return *this;
14990 }
14991
14992 SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
14993 {
14994 imageExtent = imageExtent_;
14995 return *this;
14996 }
14997
14998 SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
14999 {
15000 imageArrayLayers = imageArrayLayers_;
15001 return *this;
15002 }
15003
15004 SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
15005 {
15006 imageUsage = imageUsage_;
15007 return *this;
15008 }
15009
15010 SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
15011 {
15012 imageSharingMode = imageSharingMode_;
15013 return *this;
15014 }
15015
15016 SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
15017 {
15018 queueFamilyIndexCount = queueFamilyIndexCount_;
15019 return *this;
15020 }
15021
15022 SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
15023 {
15024 pQueueFamilyIndices = pQueueFamilyIndices_;
15025 return *this;
15026 }
15027
15028 SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
15029 {
15030 preTransform = preTransform_;
15031 return *this;
15032 }
15033
15034 SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
15035 {
15036 compositeAlpha = compositeAlpha_;
15037 return *this;
15038 }
15039
15040 SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
15041 {
15042 presentMode = presentMode_;
15043 return *this;
15044 }
15045
15046 SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
15047 {
15048 clipped = clipped_;
15049 return *this;
15050 }
15051
15052 SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
15053 {
15054 oldSwapchain = oldSwapchain_;
15055 return *this;
15056 }
15057
15058 operator const VkSwapchainCreateInfoKHR&() const
15059 {
15060 return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
15061 }
15062
15063 bool operator==( SwapchainCreateInfoKHR const& rhs ) const
15064 {
15065 return ( sType == rhs.sType )
15066 && ( pNext == rhs.pNext )
15067 && ( flags == rhs.flags )
15068 && ( surface == rhs.surface )
15069 && ( minImageCount == rhs.minImageCount )
15070 && ( imageFormat == rhs.imageFormat )
15071 && ( imageColorSpace == rhs.imageColorSpace )
15072 && ( imageExtent == rhs.imageExtent )
15073 && ( imageArrayLayers == rhs.imageArrayLayers )
15074 && ( imageUsage == rhs.imageUsage )
15075 && ( imageSharingMode == rhs.imageSharingMode )
15076 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
15077 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
15078 && ( preTransform == rhs.preTransform )
15079 && ( compositeAlpha == rhs.compositeAlpha )
15080 && ( presentMode == rhs.presentMode )
15081 && ( clipped == rhs.clipped )
15082 && ( oldSwapchain == rhs.oldSwapchain );
15083 }
15084
15085 bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
15086 {
15087 return !operator==( rhs );
15088 }
15089
15090 private:
15091 StructureType sType;
15092
15093 public:
15094 const void* pNext;
15095 SwapchainCreateFlagsKHR flags;
15096 SurfaceKHR surface;
15097 uint32_t minImageCount;
15098 Format imageFormat;
15099 ColorSpaceKHR imageColorSpace;
15100 Extent2D imageExtent;
15101 uint32_t imageArrayLayers;
15102 ImageUsageFlags imageUsage;
15103 SharingMode imageSharingMode;
15104 uint32_t queueFamilyIndexCount;
15105 const uint32_t* pQueueFamilyIndices;
15106 SurfaceTransformFlagBitsKHR preTransform;
15107 CompositeAlphaFlagBitsKHR compositeAlpha;
15108 PresentModeKHR presentMode;
15109 Bool32 clipped;
15110 SwapchainKHR oldSwapchain;
15111 };
15112 static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
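  // A minimal usage sketch: a swapchain built from the capabilities and formats queried in
  // the sketches above. "surface", "caps", "formats" and "device" are assumptions of the
  // sketch; createSwapchainKHR is the enhanced-mode helper and requires VK_KHR_swapchain to
  // be enabled on the device.
  //
  //   vk::SwapchainCreateInfoKHR swapchainInfo = vk::SwapchainCreateInfoKHR()
  //     .setSurface( surface )
  //     .setMinImageCount( caps.minImageCount )
  //     .setImageFormat( formats[0].format )
  //     .setImageColorSpace( formats[0].colorSpace )
  //     .setImageExtent( caps.currentExtent )
  //     .setImageArrayLayers( 1 )
  //     .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
  //     .setPreTransform( caps.currentTransform )
  //     .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
  //     .setPresentMode( vk::PresentModeKHR::eFifo )
  //     .setClipped( VK_TRUE );
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );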
15113
15114 enum class DebugReportFlagBitsEXT
15115 {
15116 eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
15117 eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
15118 ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
15119 eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
15120 eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
15121 };
15122
15123 using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
15124
15125 VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
15126 {
15127 return DebugReportFlagsEXT( bit0 ) | bit1;
15128 }
15129
15130 VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
15131 {
15132 return ~( DebugReportFlagsEXT( bits ) );
15133 }
15134
15135 template <> struct FlagTraits<DebugReportFlagBitsEXT>
15136 {
15137 enum
15138 {
15139 allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
15140 };
15141 };
15142
15143 struct DebugReportCallbackCreateInfoEXT
15144 {
15145 DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
15146 : sType( StructureType::eDebugReportCallbackCreateInfoEXT )
15147 , pNext( nullptr )
15148 , flags( flags_ )
15149 , pfnCallback( pfnCallback_ )
15150 , pUserData( pUserData_ )
15151 {
15152 }
15153
15154 DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
15155 {
15156 memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
15157 }
15158
15159 DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
15160 {
15161 memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
15162 return *this;
15163 }
15164
15165 DebugReportCallbackCreateInfoEXT& setSType( StructureType sType_ )
15166 {
15167 sType = sType_;
15168 return *this;
15169 }
15170
15171 DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
15172 {
15173 pNext = pNext_;
15174 return *this;
15175 }
15176
15177 DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
15178 {
15179 flags = flags_;
15180 return *this;
15181 }
15182
15183 DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
15184 {
15185 pfnCallback = pfnCallback_;
15186 return *this;
15187 }
15188
15189 DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
15190 {
15191 pUserData = pUserData_;
15192 return *this;
15193 }
15194
15195 operator const VkDebugReportCallbackCreateInfoEXT&() const
15196 {
15197 return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
15198 }
15199
15200 bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
15201 {
15202 return ( sType == rhs.sType )
15203 && ( pNext == rhs.pNext )
15204 && ( flags == rhs.flags )
15205 && ( pfnCallback == rhs.pfnCallback )
15206 && ( pUserData == rhs.pUserData );
15207 }
15208
15209 bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
15210 {
15211 return !operator==( rhs );
15212 }
15213
15214 private:
15215 StructureType sType;
15216
15217 public:
15218 const void* pNext;
15219 DebugReportFlagsEXT flags;
15220 PFN_vkDebugReportCallbackEXT pfnCallback;
15221 void* pUserData;
15222 };
15223 static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
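  // A minimal usage sketch: registering a debug report callback. "myDebugCallback" is an
  // application-defined function with the PFN_vkDebugReportCallbackEXT signature,
  // VK_EXT_debug_report must be enabled on the instance, and the underlying
  // vkCreateDebugReportCallbackEXT entry point must be resolvable at run time;
  // createDebugReportCallbackEXT is the enhanced-mode helper on vk::Instance.
  //
  //   vk::DebugReportCallbackCreateInfoEXT callbackInfo = vk::DebugReportCallbackCreateInfoEXT()
  //     .setFlags( vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning )
  //     .setPfnCallback( myDebugCallback );
  //   vk::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT( callbackInfo );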
15224
15225 enum class DebugReportObjectTypeEXT
15226 {
15227 eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
15228 eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
15229 ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
15230 eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
15231 eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
15232 eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
15233 eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
15234 eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
15235 eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
15236 eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
15237 eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
15238 eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
15239 eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
15240 eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
15241 eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
15242 eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
15243 ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
15244 ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
15245 eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
15246 ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
15247 eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
15248 eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
15249 eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
15250 eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
15251 eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
15252 eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
15253 eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
15254 eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
15255 eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
15256 eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
15257 eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
15258 eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
15259 eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT
15260 };
15261
15262 struct DebugMarkerObjectNameInfoEXT
15263 {
15264 DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
15265 : sType( StructureType::eDebugMarkerObjectNameInfoEXT )
15266 , pNext( nullptr )
15267 , objectType( objectType_ )
15268 , object( object_ )
15269 , pObjectName( pObjectName_ )
15270 {
15271 }
15272
15273 DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
15274 {
15275 memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
15276 }
15277
15278 DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
15279 {
15280 memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
15281 return *this;
15282 }
15283
15284 DebugMarkerObjectNameInfoEXT& setSType( StructureType sType_ )
15285 {
15286 sType = sType_;
15287 return *this;
15288 }
15289
15290 DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
15291 {
15292 pNext = pNext_;
15293 return *this;
15294 }
15295
15296 DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
15297 {
15298 objectType = objectType_;
15299 return *this;
15300 }
15301
15302 DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
15303 {
15304 object = object_;
15305 return *this;
15306 }
15307
15308 DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
15309 {
15310 pObjectName = pObjectName_;
15311 return *this;
15312 }
15313
15314 operator const VkDebugMarkerObjectNameInfoEXT&() const
15315 {
15316 return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
15317 }
15318
15319 bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
15320 {
15321 return ( sType == rhs.sType )
15322 && ( pNext == rhs.pNext )
15323 && ( objectType == rhs.objectType )
15324 && ( object == rhs.object )
15325 && ( pObjectName == rhs.pObjectName );
15326 }
15327
15328 bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
15329 {
15330 return !operator==( rhs );
15331 }
15332
15333 private:
15334 StructureType sType;
15335
15336 public:
15337 const void* pNext;
15338 DebugReportObjectTypeEXT objectType;
15339 uint64_t object;
15340 const char* pObjectName;
15341 };
15342 static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
15343
15344 struct DebugMarkerObjectTagInfoEXT
15345 {
15346 DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
15347 : sType( StructureType::eDebugMarkerObjectTagInfoEXT )
15348 , pNext( nullptr )
15349 , objectType( objectType_ )
15350 , object( object_ )
15351 , tagName( tagName_ )
15352 , tagSize( tagSize_ )
15353 , pTag( pTag_ )
15354 {
15355 }
15356
15357 DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
15358 {
15359 memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
15360 }
15361
15362 DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
15363 {
15364 memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
15365 return *this;
15366 }
15367
15368 DebugMarkerObjectTagInfoEXT& setSType( StructureType sType_ )
15369 {
15370 sType = sType_;
15371 return *this;
15372 }
15373
15374 DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
15375 {
15376 pNext = pNext_;
15377 return *this;
15378 }
15379
15380 DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
15381 {
15382 objectType = objectType_;
15383 return *this;
15384 }
15385
15386 DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
15387 {
15388 object = object_;
15389 return *this;
15390 }
15391
15392 DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
15393 {
15394 tagName = tagName_;
15395 return *this;
15396 }
15397
15398 DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
15399 {
15400 tagSize = tagSize_;
15401 return *this;
15402 }
15403
15404 DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
15405 {
15406 pTag = pTag_;
15407 return *this;
15408 }
15409
15410 operator const VkDebugMarkerObjectTagInfoEXT&() const
15411 {
15412 return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
15413 }
15414
15415 bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
15416 {
15417 return ( sType == rhs.sType )
15418 && ( pNext == rhs.pNext )
15419 && ( objectType == rhs.objectType )
15420 && ( object == rhs.object )
15421 && ( tagName == rhs.tagName )
15422 && ( tagSize == rhs.tagSize )
15423 && ( pTag == rhs.pTag );
15424 }
15425
15426 bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
15427 {
15428 return !operator==( rhs );
15429 }
15430
15431 private:
15432 StructureType sType;
15433
15434 public:
15435 const void* pNext;
15436 DebugReportObjectTypeEXT objectType;
15437 uint64_t object;
15438 uint64_t tagName;
15439 size_t tagSize;
15440 const void* pTag;
15441 };
15442 static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
15443
15444 enum class DebugReportErrorEXT
15445 {
15446 eNone = VK_DEBUG_REPORT_ERROR_NONE_EXT,
15447 eCallbackRef = VK_DEBUG_REPORT_ERROR_CALLBACK_REF_EXT
15448 };
15449
15450 enum class RasterizationOrderAMD
15451 {
15452 eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
15453 eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
15454 };
15455
15456 struct PipelineRasterizationStateRasterizationOrderAMD
15457 {
15458 PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
15459 : sType( StructureType::ePipelineRasterizationStateRasterizationOrderAMD )
15460 , pNext( nullptr )
15461 , rasterizationOrder( rasterizationOrder_ )
15462 {
15463 }
15464
15465 PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
15466 {
15467 memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
15468 }
15469
15470 PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
15471 {
15472 memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
15473 return *this;
15474 }
15475
15476 PipelineRasterizationStateRasterizationOrderAMD& setSType( StructureType sType_ )
15477 {
15478 sType = sType_;
15479 return *this;
15480 }
15481
15482 PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
15483 {
15484 pNext = pNext_;
15485 return *this;
15486 }
15487
15488 PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
15489 {
15490 rasterizationOrder = rasterizationOrder_;
15491 return *this;
15492 }
15493
15494 operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
15495 {
15496 return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
15497 }
15498
15499 bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
15500 {
15501 return ( sType == rhs.sType )
15502 && ( pNext == rhs.pNext )
15503 && ( rasterizationOrder == rhs.rasterizationOrder );
15504 }
15505
15506 bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
15507 {
15508 return !operator==( rhs );
15509 }
15510
15511 private:
15512 StructureType sType;
15513
15514 public:
15515 const void* pNext;
15516 RasterizationOrderAMD rasterizationOrder;
15517 };
15518 static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
15519
15520 enum class ExternalMemoryHandleTypeFlagBitsNV
15521 {
15522 eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
15523 eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
15524 eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
15525 eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
15526 };
15527
15528 using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
15529
15530 VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
15531 {
15532 return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
15533 }
15534
15535 VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits )
15536 {
15537 return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
15538 }
15539
15540 template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
15541 {
15542 enum
15543 {
15544 allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
15545 };
15546 };
15547
15548 struct ExternalMemoryImageCreateInfoNV
15549 {
15550 ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
15551 : sType( StructureType::eExternalMemoryImageCreateInfoNV )
15552 , pNext( nullptr )
15553 , handleTypes( handleTypes_ )
15554 {
15555 }
15556
15557 ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
15558 {
15559 memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
15560 }
15561
15562 ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
15563 {
15564 memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
15565 return *this;
15566 }
15567
15568 ExternalMemoryImageCreateInfoNV& setSType( StructureType sType_ )
15569 {
15570 sType = sType_;
15571 return *this;
15572 }
15573
15574 ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
15575 {
15576 pNext = pNext_;
15577 return *this;
15578 }
15579
15580 ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
15581 {
15582 handleTypes = handleTypes_;
15583 return *this;
15584 }
15585
15586 operator const VkExternalMemoryImageCreateInfoNV&() const
15587 {
15588 return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
15589 }
15590
15591 bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
15592 {
15593 return ( sType == rhs.sType )
15594 && ( pNext == rhs.pNext )
15595 && ( handleTypes == rhs.handleTypes );
15596 }
15597
15598 bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
15599 {
15600 return !operator==( rhs );
15601 }
15602
15603 private:
15604 StructureType sType;
15605
15606 public:
15607 const void* pNext;
15608 ExternalMemoryHandleTypeFlagsNV handleTypes;
15609 };
15610 static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
15611
15612 struct ExportMemoryAllocateInfoNV
15613 {
15614 ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
15615 : sType( StructureType::eExportMemoryAllocateInfoNV )
15616 , pNext( nullptr )
15617 , handleTypes( handleTypes_ )
15618 {
15619 }
15620
15621 ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
15622 {
15623 memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
15624 }
15625
15626 ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
15627 {
15628 memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
15629 return *this;
15630 }
15631
15632 ExportMemoryAllocateInfoNV& setSType( StructureType sType_ )
15633 {
15634 sType = sType_;
15635 return *this;
15636 }
15637
15638 ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
15639 {
15640 pNext = pNext_;
15641 return *this;
15642 }
15643
15644 ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
15645 {
15646 handleTypes = handleTypes_;
15647 return *this;
15648 }
15649
15650 operator const VkExportMemoryAllocateInfoNV&() const
15651 {
15652 return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
15653 }
15654
15655 bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
15656 {
15657 return ( sType == rhs.sType )
15658 && ( pNext == rhs.pNext )
15659 && ( handleTypes == rhs.handleTypes );
15660 }
15661
15662 bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
15663 {
15664 return !operator==( rhs );
15665 }
15666
15667 private:
15668 StructureType sType;
15669
15670 public:
15671 const void* pNext;
15672 ExternalMemoryHandleTypeFlagsNV handleTypes;
15673 };
15674 static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
15675
15676#ifdef VK_USE_PLATFORM_WIN32_KHR
15677 struct ImportMemoryWin32HandleInfoNV
15678 {
15679 ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
15680 : sType( StructureType::eImportMemoryWin32HandleInfoNV )
15681 , pNext( nullptr )
15682 , handleType( handleType_ )
15683 , handle( handle_ )
15684 {
15685 }
15686
15687 ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
15688 {
15689 memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
15690 }
15691
15692 ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
15693 {
15694 memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
15695 return *this;
15696 }
15697
15698 ImportMemoryWin32HandleInfoNV& setSType( StructureType sType_ )
15699 {
15700 sType = sType_;
15701 return *this;
15702 }
15703
15704 ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
15705 {
15706 pNext = pNext_;
15707 return *this;
15708 }
15709
15710 ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
15711 {
15712 handleType = handleType_;
15713 return *this;
15714 }
15715
15716 ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
15717 {
15718 handle = handle_;
15719 return *this;
15720 }
15721
15722 operator const VkImportMemoryWin32HandleInfoNV&() const
15723 {
15724 return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
15725 }
15726
15727 bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
15728 {
15729 return ( sType == rhs.sType )
15730 && ( pNext == rhs.pNext )
15731 && ( handleType == rhs.handleType )
15732 && ( handle == rhs.handle );
15733 }
15734
15735 bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
15736 {
15737 return !operator==( rhs );
15738 }
15739
15740 private:
15741 StructureType sType;
15742
15743 public:
15744 const void* pNext;
15745 ExternalMemoryHandleTypeFlagsNV handleType;
15746 HANDLE handle;
15747 };
15748 static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
15749#endif /*VK_USE_PLATFORM_WIN32_KHR*/
15750
15751 enum class ExternalMemoryFeatureFlagBitsNV
15752 {
15753 eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
15754 eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
15755 eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
15756 };
15757
15758 using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
15759
15760 VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
15761 {
15762 return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
15763 }
15764
15765 VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
15766 {
15767 return ~( ExternalMemoryFeatureFlagsNV( bits ) );
15768 }
15769
15770 template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
15771 {
15772 enum
15773 {
15774 allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
15775 };
15776 };
15777
15778 struct ExternalImageFormatPropertiesNV
15779 {
15780 operator const VkExternalImageFormatPropertiesNV&() const
15781 {
15782 return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
15783 }
15784
15785 bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
15786 {
15787 return ( imageFormatProperties == rhs.imageFormatProperties )
15788 && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
15789 && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
15790 && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
15791 }
15792
15793 bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
15794 {
15795 return !operator==( rhs );
15796 }
15797
15798 ImageFormatProperties imageFormatProperties;
15799 ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
15800 ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
15801 ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
15802 };
15803 static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
15804
15805 enum class ValidationCheckEXT
15806 {
15807 eAll = VK_VALIDATION_CHECK_ALL_EXT
15808 };
15809
15810 struct ValidationFlagsEXT
15811 {
15812 ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
15813 : sType( StructureType::eValidationFlagsEXT )
15814 , pNext( nullptr )
15815 , disabledValidationCheckCount( disabledValidationCheckCount_ )
15816 , pDisabledValidationChecks( pDisabledValidationChecks_ )
15817 {
15818 }
15819
15820 ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
15821 {
15822 memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
15823 }
15824
15825 ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
15826 {
15827 memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
15828 return *this;
15829 }
15830
15831 ValidationFlagsEXT& setSType( StructureType sType_ )
15832 {
15833 sType = sType_;
15834 return *this;
15835 }
15836
15837 ValidationFlagsEXT& setPNext( const void* pNext_ )
15838 {
15839 pNext = pNext_;
15840 return *this;
15841 }
15842
15843 ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
15844 {
15845 disabledValidationCheckCount = disabledValidationCheckCount_;
15846 return *this;
15847 }
15848
15849 ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
15850 {
15851 pDisabledValidationChecks = pDisabledValidationChecks_;
15852 return *this;
15853 }
15854
15855 operator const VkValidationFlagsEXT&() const
15856 {
15857 return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
15858 }
15859
15860 bool operator==( ValidationFlagsEXT const& rhs ) const
15861 {
15862 return ( sType == rhs.sType )
15863 && ( pNext == rhs.pNext )
15864 && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
15865 && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
15866 }
15867
15868 bool operator!=( ValidationFlagsEXT const& rhs ) const
15869 {
15870 return !operator==( rhs );
15871 }
15872
15873 private:
15874 StructureType sType;
15875
15876 public:
15877 const void* pNext;
15878 uint32_t disabledValidationCheckCount;
15879 ValidationCheckEXT* pDisabledValidationChecks;
15880 };
15881 static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
15882
15883 enum class IndirectCommandsLayoutUsageFlagBitsNVX
15884 {
15885 eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
15886 eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
15887 eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
15888 eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
15889 };
15890
15891 using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
15892
15893 VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 )
15894 {
15895 return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
15896 }
15897
15898 VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits )
15899 {
15900 return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
15901 }
15902
15903 template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
15904 {
15905 enum
15906 {
15907 allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
15908 };
15909 };
15910
15911 enum class ObjectEntryUsageFlagBitsNVX
15912 {
15913 eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
15914 eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
15915 };
15916
15917 using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
15918
15919 VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 )
15920 {
15921 return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
15922 }
15923
15924 VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits )
15925 {
15926 return ~( ObjectEntryUsageFlagsNVX( bits ) );
15927 }
15928
15929 template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
15930 {
15931 enum
15932 {
15933 allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
15934 };
15935 };
15936
15937 enum class IndirectCommandsTokenTypeNVX
15938 {
15939 eVkIndirectCommandsTokenPipeline = VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX,
15940 eVkIndirectCommandsTokenDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_DESCRIPTOR_SET_NVX,
15941 eVkIndirectCommandsTokenIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_INDEX_BUFFER_NVX,
15942 eVkIndirectCommandsTokenVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_VERTEX_BUFFER_NVX,
15943 eVkIndirectCommandsTokenPushConstant = VK_INDIRECT_COMMANDS_TOKEN_PUSH_CONSTANT_NVX,
15944 eVkIndirectCommandsTokenDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_DRAW_INDEXED_NVX,
15945 eVkIndirectCommandsTokenDraw = VK_INDIRECT_COMMANDS_TOKEN_DRAW_NVX,
15946 eVkIndirectCommandsTokenDispatch = VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX
15947 };
15948
15949 struct IndirectCommandsTokenNVX
15950 {
15951 IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0 )
15952 : tokenType( tokenType_ )
15953 , buffer( buffer_ )
15954 , offset( offset_ )
15955 {
15956 }
15957
15958 IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs )
15959 {
15960 memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
15961 }
15962
15963 IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs )
15964 {
15965 memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
15966 return *this;
15967 }
15968
15969 IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
15970 {
15971 tokenType = tokenType_;
15972 return *this;
15973 }
15974
15975 IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ )
15976 {
15977 buffer = buffer_;
15978 return *this;
15979 }
15980
15981 IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ )
15982 {
15983 offset = offset_;
15984 return *this;
15985 }
15986
15987 operator const VkIndirectCommandsTokenNVX&() const
15988 {
15989 return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>(this);
15990 }
15991
15992 bool operator==( IndirectCommandsTokenNVX const& rhs ) const
15993 {
15994 return ( tokenType == rhs.tokenType )
15995 && ( buffer == rhs.buffer )
15996 && ( offset == rhs.offset );
15997 }
15998
15999 bool operator!=( IndirectCommandsTokenNVX const& rhs ) const
16000 {
16001 return !operator==( rhs );
16002 }
16003
16004 IndirectCommandsTokenTypeNVX tokenType;
16005 Buffer buffer;
16006 DeviceSize offset;
16007 };
16008 static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
16009
16010 struct IndirectCommandsLayoutTokenNVX
16011 {
16012 IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, uint32_t bindingUnit_ = 0, uint32_t dynamicCount_ = 0, uint32_t divisor_ = 0 )
16013 : tokenType( tokenType_ )
16014 , bindingUnit( bindingUnit_ )
16015 , dynamicCount( dynamicCount_ )
16016 , divisor( divisor_ )
16017 {
16018 }
16019
16020 IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs )
16021 {
16022 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
16023 }
16024
16025 IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs )
16026 {
16027 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
16028 return *this;
16029 }
16030
16031 IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
16032 {
16033 tokenType = tokenType_;
16034 return *this;
16035 }
16036
16037 IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ )
16038 {
16039 bindingUnit = bindingUnit_;
16040 return *this;
16041 }
16042
16043 IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ )
16044 {
16045 dynamicCount = dynamicCount_;
16046 return *this;
16047 }
16048
16049 IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ )
16050 {
16051 divisor = divisor_;
16052 return *this;
16053 }
16054
16055 operator const VkIndirectCommandsLayoutTokenNVX&() const
16056 {
16057 return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>(this);
16058 }
16059
16060 bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const
16061 {
16062 return ( tokenType == rhs.tokenType )
16063 && ( bindingUnit == rhs.bindingUnit )
16064 && ( dynamicCount == rhs.dynamicCount )
16065 && ( divisor == rhs.divisor );
16066 }
16067
16068 bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const
16069 {
16070 return !operator==( rhs );
16071 }
16072
16073 IndirectCommandsTokenTypeNVX tokenType;
16074 uint32_t bindingUnit;
16075 uint32_t dynamicCount;
16076 uint32_t divisor;
16077 };
16078 static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
16079
16080 struct IndirectCommandsLayoutCreateInfoNVX
16081 {
16082 IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), uint32_t tokenCount_ = 0, const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr )
16083 : sType( StructureType::eIndirectCommandsLayoutCreateInfoNVX )
16084 , pNext( nullptr )
16085 , pipelineBindPoint( pipelineBindPoint_ )
16086 , flags( flags_ )
16087 , tokenCount( tokenCount_ )
16088 , pTokens( pTokens_ )
16089 {
16090 }
16091
16092 IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
16093 {
16094 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
16095 }
16096
16097 IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
16098 {
16099 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
16100 return *this;
16101 }
16102
16103 IndirectCommandsLayoutCreateInfoNVX& setSType( StructureType sType_ )
16104 {
16105 sType = sType_;
16106 return *this;
16107 }
16108
16109 IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ )
16110 {
16111 pNext = pNext_;
16112 return *this;
16113 }
16114
16115 IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
16116 {
16117 pipelineBindPoint = pipelineBindPoint_;
16118 return *this;
16119 }
16120
16121 IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ )
16122 {
16123 flags = flags_;
16124 return *this;
16125 }
16126
16127 IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ )
16128 {
16129 tokenCount = tokenCount_;
16130 return *this;
16131 }
16132
16133 IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ )
16134 {
16135 pTokens = pTokens_;
16136 return *this;
16137 }
16138
16139 operator const VkIndirectCommandsLayoutCreateInfoNVX&() const
16140 {
16141 return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>(this);
16142 }
16143
16144 bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
16145 {
16146 return ( sType == rhs.sType )
16147 && ( pNext == rhs.pNext )
16148 && ( pipelineBindPoint == rhs.pipelineBindPoint )
16149 && ( flags == rhs.flags )
16150 && ( tokenCount == rhs.tokenCount )
16151 && ( pTokens == rhs.pTokens );
16152 }
16153
16154 bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
16155 {
16156 return !operator==( rhs );
16157 }
16158
16159 private:
16160 StructureType sType;
16161
16162 public:
16163 const void* pNext;
16164 PipelineBindPoint pipelineBindPoint;
16165 IndirectCommandsLayoutUsageFlagsNVX flags;
16166 uint32_t tokenCount;
16167 const IndirectCommandsLayoutTokenNVX* pTokens;
16168 };
16169 static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
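  // Illustrative sketch (not part of the generated header): describing an indirect
  // commands layout from a local token array. The device-level create call that
  // would consume this struct is assumed to exist elsewhere in the header and is
  // not shown here.
  //
  //   vk::IndirectCommandsLayoutTokenNVX tokens[] = {
  //     vk::IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, 0, 0, 1 ),
  //     vk::IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDrawIndexed, 0, 0, 1 )
  //   };
  //   vk::IndirectCommandsLayoutCreateInfoNVX layoutInfo = vk::IndirectCommandsLayoutCreateInfoNVX()
  //     .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //     .setFlags( vk::IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences )
  //     .setTokenCount( 2 )
  //     .setPTokens( tokens );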
16170
16171 enum class ObjectEntryTypeNVX
16172 {
16173 eVkObjectEntryDescriptorSet = VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX,
16174 eVkObjectEntryPipeline = VK_OBJECT_ENTRY_PIPELINE_NVX,
16175 eVkObjectEntryIndexBuffer = VK_OBJECT_ENTRY_INDEX_BUFFER_NVX,
16176 eVkObjectEntryVertexBuffer = VK_OBJECT_ENTRY_VERTEX_BUFFER_NVX,
16177 eVkObjectEntryPushConstant = VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX
16178 };
16179
16180 struct ObjectTableCreateInfoNVX
16181 {
16182 ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, const uint32_t* pObjectEntryCounts_ = nullptr, const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, uint32_t maxUniformBuffersPerDescriptor_ = 0, uint32_t maxStorageBuffersPerDescriptor_ = 0, uint32_t maxStorageImagesPerDescriptor_ = 0, uint32_t maxSampledImagesPerDescriptor_ = 0, uint32_t maxPipelineLayouts_ = 0 )
16183 : sType( StructureType::eObjectTableCreateInfoNVX )
16184 , pNext( nullptr )
16185 , objectCount( objectCount_ )
16186 , pObjectEntryTypes( pObjectEntryTypes_ )
16187 , pObjectEntryCounts( pObjectEntryCounts_ )
16188 , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
16189 , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
16190 , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
16191 , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
16192 , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
16193 , maxPipelineLayouts( maxPipelineLayouts_ )
16194 {
16195 }
16196
16197 ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs )
16198 {
16199 memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
16200 }
16201
16202 ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs )
16203 {
16204 memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
16205 return *this;
16206 }
16207
16208 ObjectTableCreateInfoNVX& setSType( StructureType sType_ )
16209 {
16210 sType = sType_;
16211 return *this;
16212 }
16213
16214 ObjectTableCreateInfoNVX& setPNext( const void* pNext_ )
16215 {
16216 pNext = pNext_;
16217 return *this;
16218 }
16219
16220 ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ )
16221 {
16222 objectCount = objectCount_;
16223 return *this;
16224 }
16225
16226 ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ )
16227 {
16228 pObjectEntryTypes = pObjectEntryTypes_;
16229 return *this;
16230 }
16231
16232 ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ )
16233 {
16234 pObjectEntryCounts = pObjectEntryCounts_;
16235 return *this;
16236 }
16237
16238 ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ )
16239 {
16240 pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
16241 return *this;
16242 }
16243
16244 ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ )
16245 {
16246 maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
16247 return *this;
16248 }
16249
16250 ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ )
16251 {
16252 maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
16253 return *this;
16254 }
16255
16256 ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ )
16257 {
16258 maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
16259 return *this;
16260 }
16261
16262 ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ )
16263 {
16264 maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
16265 return *this;
16266 }
16267
16268 ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ )
16269 {
16270 maxPipelineLayouts = maxPipelineLayouts_;
16271 return *this;
16272 }
16273
16274 operator const VkObjectTableCreateInfoNVX&() const
16275 {
16276 return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>(this);
16277 }
16278
16279 bool operator==( ObjectTableCreateInfoNVX const& rhs ) const
16280 {
16281 return ( sType == rhs.sType )
16282 && ( pNext == rhs.pNext )
16283 && ( objectCount == rhs.objectCount )
16284 && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
16285 && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
16286 && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
16287 && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
16288 && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
16289 && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
16290 && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
16291 && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
16292 }
16293
16294 bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const
16295 {
16296 return !operator==( rhs );
16297 }
16298
16299 private:
16300 StructureType sType;
16301
16302 public:
16303 const void* pNext;
16304 uint32_t objectCount;
16305 const ObjectEntryTypeNVX* pObjectEntryTypes;
16306 const uint32_t* pObjectEntryCounts;
16307 const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
16308 uint32_t maxUniformBuffersPerDescriptor;
16309 uint32_t maxStorageBuffersPerDescriptor;
16310 uint32_t maxStorageImagesPerDescriptor;
16311 uint32_t maxSampledImagesPerDescriptor;
16312 uint32_t maxPipelineLayouts;
16313 };
16314 static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
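  // Illustrative sketch (not part of the generated header): describing an NVX object
  // table that can hold pipelines and descriptor sets. The three parallel arrays must
  // all have objectCount entries; the counts and limits below are arbitrary example
  // values.
  //
  //   vk::ObjectEntryTypeNVX entryTypes[]        = { vk::ObjectEntryTypeNVX::eVkObjectEntryPipeline,
  //                                                  vk::ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet };
  //   uint32_t entryCounts[]                     = { 16, 16 };
  //   vk::ObjectEntryUsageFlagsNVX entryUsages[] = { vk::ObjectEntryUsageFlagBitsNVX::eGraphics,
  //                                                  vk::ObjectEntryUsageFlagBitsNVX::eGraphics };
  //   vk::ObjectTableCreateInfoNVX tableInfo = vk::ObjectTableCreateInfoNVX()
  //     .setObjectCount( 2 )
  //     .setPObjectEntryTypes( entryTypes )
  //     .setPObjectEntryCounts( entryCounts )
  //     .setPObjectEntryUsageFlags( entryUsages )
  //     .setMaxUniformBuffersPerDescriptor( 1 )
  //     .setMaxPipelineLayouts( 1 );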
16315
16316 struct ObjectTableEntryNVX
16317 {
16318 ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() )
16319 : type( type_ )
16320 , flags( flags_ )
16321 {
16322 }
16323
16324 ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs )
16325 {
16326 memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
16327 }
16328
16329 ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs )
16330 {
16331 memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
16332 return *this;
16333 }
16334
16335 ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ )
16336 {
16337 type = type_;
16338 return *this;
16339 }
16340
16341 ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16342 {
16343 flags = flags_;
16344 return *this;
16345 }
16346
16347 operator const VkObjectTableEntryNVX&() const
16348 {
16349 return *reinterpret_cast<const VkObjectTableEntryNVX*>(this);
16350 }
16351
16352 bool operator==( ObjectTableEntryNVX const& rhs ) const
16353 {
16354 return ( type == rhs.type )
16355 && ( flags == rhs.flags );
16356 }
16357
16358 bool operator!=( ObjectTableEntryNVX const& rhs ) const
16359 {
16360 return !operator==( rhs );
16361 }
16362
16363 ObjectEntryTypeNVX type;
16364 ObjectEntryUsageFlagsNVX flags;
16365 };
16366 static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
16367
16368 struct ObjectTablePipelineEntryNVX
16369 {
16370 ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Pipeline pipeline_ = Pipeline() )
16371 : type( type_ )
16372 , flags( flags_ )
16373 , pipeline( pipeline_ )
16374 {
16375 }
16376
16377 ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
16378 {
16379 memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
16380 }
16381
16382 ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
16383 {
16384 memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
16385 return *this;
16386 }
16387
16388 ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ )
16389 {
16390 type = type_;
16391 return *this;
16392 }
16393
16394 ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16395 {
16396 flags = flags_;
16397 return *this;
16398 }
16399
16400 ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ )
16401 {
16402 pipeline = pipeline_;
16403 return *this;
16404 }
16405
16406 operator const VkObjectTablePipelineEntryNVX&() const
16407 {
16408 return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>(this);
16409 }
16410
16411 bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
16412 {
16413 return ( type == rhs.type )
16414 && ( flags == rhs.flags )
16415 && ( pipeline == rhs.pipeline );
16416 }
16417
16418 bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
16419 {
16420 return !operator==( rhs );
16421 }
16422
16423 ObjectEntryTypeNVX type;
16424 ObjectEntryUsageFlagsNVX flags;
16425 Pipeline pipeline;
16426 };
16427 static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
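  // Illustrative sketch (not part of the generated header): a pipeline entry as it
  // would be registered into an object table; 'pipeline' is a hypothetical,
  // previously created vk::Pipeline. The remaining ObjectTable*EntryNVX structs
  // below follow the same pattern for their respective resource types.
  //
  //   vk::ObjectTablePipelineEntryNVX pipelineEntry = vk::ObjectTablePipelineEntryNVX()
  //     .setType( vk::ObjectEntryTypeNVX::eVkObjectEntryPipeline )
  //     .setFlags( vk::ObjectEntryUsageFlagBitsNVX::eGraphics )
  //     .setPipeline( pipeline );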
16428
16429 struct ObjectTableDescriptorSetEntryNVX
16430 {
16431 ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), DescriptorSet descriptorSet_ = DescriptorSet() )
16432 : type( type_ )
16433 , flags( flags_ )
16434 , pipelineLayout( pipelineLayout_ )
16435 , descriptorSet( descriptorSet_ )
16436 {
16437 }
16438
16439 ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs )
16440 {
16441 memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
16442 }
16443
16444 ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs )
16445 {
16446 memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
16447 return *this;
16448 }
16449
16450 ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ )
16451 {
16452 type = type_;
16453 return *this;
16454 }
16455
16456 ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16457 {
16458 flags = flags_;
16459 return *this;
16460 }
16461
16462 ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
16463 {
16464 pipelineLayout = pipelineLayout_;
16465 return *this;
16466 }
16467
16468 ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ )
16469 {
16470 descriptorSet = descriptorSet_;
16471 return *this;
16472 }
16473
16474 operator const VkObjectTableDescriptorSetEntryNVX&() const
16475 {
16476 return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>(this);
16477 }
16478
16479 bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const
16480 {
16481 return ( type == rhs.type )
16482 && ( flags == rhs.flags )
16483 && ( pipelineLayout == rhs.pipelineLayout )
16484 && ( descriptorSet == rhs.descriptorSet );
16485 }
16486
16487 bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const
16488 {
16489 return !operator==( rhs );
16490 }
16491
16492 ObjectEntryTypeNVX type;
16493 ObjectEntryUsageFlagsNVX flags;
16494 PipelineLayout pipelineLayout;
16495 DescriptorSet descriptorSet;
16496 };
16497 static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
16498
16499 struct ObjectTableVertexBufferEntryNVX
16500 {
16501 ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
16502 : type( type_ )
16503 , flags( flags_ )
16504 , buffer( buffer_ )
16505 {
16506 }
16507
16508 ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
16509 {
16510 memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
16511 }
16512
16513 ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
16514 {
16515 memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
16516 return *this;
16517 }
16518
16519 ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
16520 {
16521 type = type_;
16522 return *this;
16523 }
16524
16525 ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16526 {
16527 flags = flags_;
16528 return *this;
16529 }
16530
16531 ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ )
16532 {
16533 buffer = buffer_;
16534 return *this;
16535 }
16536
16537 operator const VkObjectTableVertexBufferEntryNVX&() const
16538 {
16539 return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>(this);
16540 }
16541
16542 bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
16543 {
16544 return ( type == rhs.type )
16545 && ( flags == rhs.flags )
16546 && ( buffer == rhs.buffer );
16547 }
16548
16549 bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
16550 {
16551 return !operator==( rhs );
16552 }
16553
16554 ObjectEntryTypeNVX type;
16555 ObjectEntryUsageFlagsNVX flags;
16556 Buffer buffer;
16557 };
16558 static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
16559
16560 struct ObjectTableIndexBufferEntryNVX
16561 {
16562 ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
16563 : type( type_ )
16564 , flags( flags_ )
16565 , buffer( buffer_ )
16566 {
16567 }
16568
16569 ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs )
16570 {
16571 memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
16572 }
16573
16574 ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs )
16575 {
16576 memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
16577 return *this;
16578 }
16579
16580 ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
16581 {
16582 type = type_;
16583 return *this;
16584 }
16585
16586 ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16587 {
16588 flags = flags_;
16589 return *this;
16590 }
16591
16592 ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ )
16593 {
16594 buffer = buffer_;
16595 return *this;
16596 }
16597
16598 operator const VkObjectTableIndexBufferEntryNVX&() const
16599 {
16600 return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>(this);
16601 }
16602
16603 bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const
16604 {
16605 return ( type == rhs.type )
16606 && ( flags == rhs.flags )
16607 && ( buffer == rhs.buffer );
16608 }
16609
16610 bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const
16611 {
16612 return !operator==( rhs );
16613 }
16614
16615 ObjectEntryTypeNVX type;
16616 ObjectEntryUsageFlagsNVX flags;
16617 Buffer buffer;
16618 };
16619 static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
16620
16621 struct ObjectTablePushConstantEntryNVX
16622 {
16623 ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), ShaderStageFlags stageFlags_ = ShaderStageFlags() )
16624 : type( type_ )
16625 , flags( flags_ )
16626 , pipelineLayout( pipelineLayout_ )
16627 , stageFlags( stageFlags_ )
16628 {
16629 }
16630
16631 ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
16632 {
16633 memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
16634 }
16635
16636 ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
16637 {
16638 memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
16639 return *this;
16640 }
16641
16642 ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ )
16643 {
16644 type = type_;
16645 return *this;
16646 }
16647
16648 ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16649 {
16650 flags = flags_;
16651 return *this;
16652 }
16653
16654 ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
16655 {
16656 pipelineLayout = pipelineLayout_;
16657 return *this;
16658 }
16659
16660 ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ )
16661 {
16662 stageFlags = stageFlags_;
16663 return *this;
16664 }
16665
16666 operator const VkObjectTablePushConstantEntryNVX&() const
16667 {
16668 return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>(this);
16669 }
16670
16671 bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
16672 {
16673 return ( type == rhs.type )
16674 && ( flags == rhs.flags )
16675 && ( pipelineLayout == rhs.pipelineLayout )
16676 && ( stageFlags == rhs.stageFlags );
16677 }
16678
16679 bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
16680 {
16681 return !operator==( rhs );
16682 }
16683
16684 ObjectEntryTypeNVX type;
16685 ObjectEntryUsageFlagsNVX flags;
16686 PipelineLayout pipelineLayout;
16687 ShaderStageFlags stageFlags;
16688 };
16689 static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
16690
16691 VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties )
16692 {
16693 return static_cast<Result>( vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
16694 }
16695
16696#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16697 template <typename Allocator = std::allocator<LayerProperties>>
16698 typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties()
16699 {
16700 std::vector<LayerProperties,Allocator> properties;
16701 uint32_t propertyCount;
16702 Result result;
16703 do
16704 {
16705 result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
16706 if ( ( result == Result::eSuccess ) && propertyCount )
16707 {
16708 properties.resize( propertyCount );
16709 result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
16710 }
16711 } while ( result == Result::eIncomplete );
16712 assert( propertyCount <= properties.size() );
16713 properties.resize( propertyCount );
16714 return createResultValue( result, properties, "vk::enumerateInstanceLayerProperties" );
16715 }
16716#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
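  // Illustrative usage sketch (not part of the generated header): in enhanced mode
  // with exceptions enabled, the overload above returns the filled vector directly,
  // and the count-query/resize retry loop is handled internally.
  //
  //   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
  //   for ( vk::LayerProperties const& layer : layers )
  //   {
  //     // inspect layer.layerName, layer.implementationVersion, ...
  //   }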
16717
16718 VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties )
16719 {
16720 return static_cast<Result>( vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
16721 }
16722
16723#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16724 template <typename Allocator = std::allocator<ExtensionProperties>>
16725 typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr )
16726 {
16727 std::vector<ExtensionProperties,Allocator> properties;
16728 uint32_t propertyCount;
16729 Result result;
16730 do
16731 {
16732 result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
16733 if ( ( result == Result::eSuccess ) && propertyCount )
16734 {
16735 properties.resize( propertyCount );
16736 result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
16737 }
16738 } while ( result == Result::eIncomplete );
16739 assert( propertyCount <= properties.size() );
16740 properties.resize( propertyCount );
16741 return createResultValue( result, properties, "vk::enumerateInstanceExtensionProperties" );
16742 }
16743#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
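  // Illustrative usage sketch (not part of the generated header): enumerating
  // instance extensions globally and for one named layer; the layer name is a
  // hypothetical example.
  //
  //   std::vector<vk::ExtensionProperties> globalExtensions =
  //       vk::enumerateInstanceExtensionProperties();
  //   std::string layerName( "VK_LAYER_LUNARG_standard_validation" );
  //   std::vector<vk::ExtensionProperties> layerExtensions =
  //       vk::enumerateInstanceExtensionProperties( layerName );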
16744
16745 // forward declarations
16746 struct CmdProcessCommandsInfoNVX;
16747
16748  class CommandBuffer
16749 {
16750 public:
16751 CommandBuffer()
16752 : m_commandBuffer(VK_NULL_HANDLE)
16753 {}
16754
16755#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
16756 CommandBuffer(VkCommandBuffer commandBuffer)
16757 : m_commandBuffer(commandBuffer)
16758 {}
16759
16760 CommandBuffer& operator=(VkCommandBuffer commandBuffer)
16761 {
16762 m_commandBuffer = commandBuffer;
16763 return *this;
16764 }
16765#endif
16766
16767    bool operator==(CommandBuffer const &rhs) const
16768 {
16769 return m_commandBuffer == rhs.m_commandBuffer;
16770 }
16771
16772 bool operator!=(CommandBuffer const &rhs) const
16773 {
16774 return m_commandBuffer != rhs.m_commandBuffer;
16775 }
16776
16777 bool operator<(CommandBuffer const &rhs) const
16778 {
16779 return m_commandBuffer < rhs.m_commandBuffer;
16780 }
16781
16782    Result begin( const CommandBufferBeginInfo* pBeginInfo ) const
16783 {
16784 return static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
16785 }
16786
16787#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16788 ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo ) const
16789 {
16790 Result result = static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
16791 return createResultValue( result, "vk::CommandBuffer::begin" );
16792 }
16793#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16794
16795#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16796 Result end( ) const
16797 {
16798 return static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
16799 }
16800#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16801
16802#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16803 ResultValueType<void>::type end() const
16804 {
16805 Result result = static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
16806 return createResultValue( result, "vk::CommandBuffer::end" );
16807 }
16808#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16809
16810#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16811 Result reset( CommandBufferResetFlags flags ) const
16812 {
16813 return static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
16814 }
16815#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16816
16817#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16818 ResultValueType<void>::type reset( CommandBufferResetFlags flags ) const
16819 {
16820 Result result = static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
16821 return createResultValue( result, "vk::CommandBuffer::reset" );
16822 }
16823#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
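    // Illustrative usage sketch (not part of the generated header): recording a
    // command buffer with the enhanced-mode begin()/end() overloads above, which
    // report failure through createResultValue instead of returning a Result to
    // check. 'commandBuffer' is a hypothetical, already allocated vk::CommandBuffer.
    //
    //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
    //   commandBuffer.begin( beginInfo );
    //   // ... record commands ...
    //   commandBuffer.end();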
16824
16825#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16826 void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
16827 {
16828 vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
16829 }
16830#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16831
16832#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16833 void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
16834 {
16835 vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
16836 }
16837#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16838
16839 void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const
16840 {
16841 vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
16842 }
16843
16844#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16845 void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const
16846 {
16847 vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
16848 }
16849#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16850
16851 void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const
16852 {
16853 vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
16854 }
16855
16856#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16857 void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const
16858 {
16859 vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
16860 }
16861#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
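    // Illustrative sketch (not part of the generated header): the ArrayProxy
    // overloads above accept a single element by reference, so a one-viewport /
    // one-scissor setup needs no explicit count or pointer. The extent values are
    // arbitrary example numbers.
    //
    //   vk::Viewport viewport( 0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f );
    //   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 640, 480 ) );
    //   commandBuffer.setViewport( 0, viewport );
    //   commandBuffer.setScissor( 0, scissor );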
16862
16863#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16864 void setLineWidth( float lineWidth ) const
16865 {
16866 vkCmdSetLineWidth( m_commandBuffer, lineWidth );
16867 }
16868#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16869
16870#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16871 void setLineWidth( float lineWidth ) const
16872 {
16873 vkCmdSetLineWidth( m_commandBuffer, lineWidth );
16874 }
16875#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16876
16877#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16878 void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
16879 {
16880 vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
16881 }
16882#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16883
16884#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16885 void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
16886 {
16887 vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
16888 }
16889#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16890
16891#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16892 void setBlendConstants( const float blendConstants[4] ) const
16893 {
16894 vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
16895 }
16896#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16897
16898#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16899 void setBlendConstants( const float blendConstants[4] ) const
16900 {
16901 vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
16902 }
16903#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16904
16905#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16906 void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
16907 {
16908 vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
16909 }
16910#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16911
16912#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16913 void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
16914 {
16915 vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
16916 }
16917#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16918
16919#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16920 void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
16921 {
16922 vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
16923 }
16924#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16925
16926#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16927 void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
16928 {
16929 vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
16930 }
16931#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16932
16933#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16934 void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
16935 {
16936 vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
16937 }
16938#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16939
16940#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16941 void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
16942 {
16943 vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
16944 }
16945#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16946
16947#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16948 void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
16949 {
16950 vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
16951 }
16952#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16953
16954#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16955 void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
16956 {
16957 vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
16958 }
16959#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16960
16961 void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
16962 {
16963 vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
16964 }
16965
16966#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16967 void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const
16968 {
16969 vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
16970 }
16971#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
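    // Illustrative sketch (not part of the generated header): binding a single
    // descriptor set with no dynamic offsets through the ArrayProxy overload above;
    // 'pipelineLayout' and 'descriptorSet' are hypothetical handles created elsewhere.
    //
    //   commandBuffer.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout,
    //                                     0, descriptorSet, nullptr );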
16972
16973#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
16974 void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
16975 {
16976 vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
16977 }
16978#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16979
16980#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16981 void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
16982 {
16983 vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
16984 }
16985#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
16986
16987 void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const
16988 {
16989 vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
16990 }
16991
16992#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
16993 void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const
16994 {
16995#ifdef VULKAN_HPP_NO_EXCEPTIONS
16996 assert( buffers.size() == offsets.size() );
16997#else
16998 if ( buffers.size() != offsets.size() )
16999 {
17000 throw std::logic_error( "vk::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
17001 }
17002#endif // VULKAN_HPP_NO_EXCEPTIONS
17003 vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
17004 }
17005#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
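    // Illustrative sketch (not part of the generated header): the enhanced overload
    // above checks that buffers and offsets have matching sizes, so passing one
    // hypothetical vertex buffer together with its offset is enough.
    //
    //   vk::DeviceSize vertexOffset = 0;
    //   commandBuffer.bindVertexBuffers( 0, vertexBuffer, vertexOffset );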
17006
17007#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17008 void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
17009 {
17010 vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
17011 }
17012#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17013
17014#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17015 void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
17016 {
17017 vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
17018 }
17019#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17020
17021#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17022 void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
17023 {
17024 vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
17025 }
17026#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17027
17028#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17029 void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
17030 {
17031 vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
17032 }
17033#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17034
17035#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17036 void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17037 {
17038 vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17039 }
17040#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17041
17042#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17043 void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17044 {
17045 vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17046 }
17047#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17048
17049#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17050 void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17051 {
17052 vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17053 }
17054#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17055
17056#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17057 void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17058 {
17059 vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17060 }
17061#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17062
17063#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17064 void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
17065 {
17066 vkCmdDispatch( m_commandBuffer, x, y, z );
17067 }
17068#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17069
17070#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17071 void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
17072 {
17073 vkCmdDispatch( m_commandBuffer, x, y, z );
17074 }
17075#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17076
17077#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17078 void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
17079 {
17080 vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
17081 }
17082#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17083
17084#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17085 void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
17086 {
17087 vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
17088 }
17089#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17090
17091 void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const
17092 {
17093 vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
17094 }
17095
17096#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17097 void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const
17098 {
17099 vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
17100 }
17101#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
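    // Illustrative sketch (not part of the generated header): copying a whole
    // hypothetical staging buffer into a device-local buffer with a single
    // BufferCopy region.
    //
    //   vk::BufferCopy region( 0, 0, bufferSize );   // srcOffset, dstOffset, size
    //   commandBuffer.copyBuffer( stagingBuffer, deviceBuffer, region );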
17102
17103 void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const
17104 {
17105 vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
17106 }
17107
17108#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17109 void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const
17110 {
17111 vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
17112 }
17113#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17114
17115 void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const
17116 {
17117 vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
17118 }
17119
17120#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17121 void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const
17122 {
17123 vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
17124 }
17125#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17126
17127 void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const
17128 {
17129 vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
17130 }
17131
17132#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17133 void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const
17134 {
17135 vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
17136 }
17137#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17138
17139 void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const
17140 {
17141 vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
17142 }
17143
17144#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17145 void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const
17146 {
17147 vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
17148 }
17149#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17150
17151 void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const
17152 {
17153 vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
17154 }
17155
17156#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17157 template <typename T>
17158 void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const
17159 {
17160 vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
17161 }
17162#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
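    // Illustrative sketch (not part of the generated header): the templated
    // ArrayProxy overload above derives the byte size from the element type, so a
    // small inline update reads naturally; 'uniformBuffer' is a hypothetical handle.
    //
    //   std::array<float, 4> tintColor = {{ 1.0f, 0.0f, 0.0f, 1.0f }};
    //   commandBuffer.updateBuffer<float>( uniformBuffer, 0, tintColor );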
17163
17164#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17165 void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
17166 {
17167 vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
17168 }
17169#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17170
17171#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17172 void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
17173 {
17174 vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
17175 }
17176#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17177
17178 void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
17179 {
17180 vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
17181 }
17182
17183#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17184 void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const
17185 {
17186 vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
17187 }
17188#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17189
17190 void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
17191 {
17192 vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
17193 }
17194
17195#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17196 void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const
17197 {
17198 vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
17199 }
17200#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17201
17202 void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const
17203 {
17204 vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
17205 }
17206
17207#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17208 void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const
17209 {
17210 vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
17211 }
17212#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17213
17214 void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const
17215 {
17216 vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
17217 }
17218
17219#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17220 void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const
17221 {
17222 vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
17223 }
17224#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17225
17226#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17227 void setEvent( Event event, PipelineStageFlags stageMask ) const
17228 {
17229 vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
17230 }
17231#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17232
17233#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17234 void setEvent( Event event, PipelineStageFlags stageMask ) const
17235 {
17236 vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
17237 }
17238#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17239
17240#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17241 void resetEvent( Event event, PipelineStageFlags stageMask ) const
17242 {
17243 vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
17244 }
17245#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17246
17247#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17248 void resetEvent( Event event, PipelineStageFlags stageMask ) const
17249 {
17250 vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
17251 }
17252#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17253
17254 void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
17255 {
17256 vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
17257 }
17258
17259#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17260 void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
17261 {
17262 vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
17263 }
17264#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17265
17266 void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
17267 {
17268 vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
17269 }
17270
17271#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17272 void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
17273 {
17274 vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
17275 }
17276#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
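    // Illustrative usage sketch (hand-written, not generated from the registry): recording an
    // image layout transition with the ArrayProxy overload of pipelineBarrier() above. The
    // handles commandBuffer and image are assumed to be valid vk::CommandBuffer / vk::Image
    // objects created elsewhere.
    //
    //   vk::ImageMemoryBarrier barrier;
    //   barrier.setSrcAccessMask( vk::AccessFlags() )
    //          .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
    //          .setOldLayout( vk::ImageLayout::eUndefined )
    //          .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
    //          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
    //          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
    //          .setImage( image )
    //          .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
    //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
    //                                  vk::DependencyFlags(), nullptr, nullptr, barrier );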
17277
17278#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17279 void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
17280 {
17281 vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
17282 }
17283#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17284
17285#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17286 void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
17287 {
17288 vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
17289 }
17290#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17291
17292#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17293 void endQuery( QueryPool queryPool, uint32_t query ) const
17294 {
17295 vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
17296 }
17297#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17298
17299#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17300 void endQuery( QueryPool queryPool, uint32_t query ) const
17301 {
17302 vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
17303 }
17304#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17305
17306#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17307 void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
17308 {
17309 vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
17310 }
17311#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17312
17313#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17314 void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
17315 {
17316 vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
17317 }
17318#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17319
17320#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17321 void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
17322 {
17323 vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
17324 }
17325#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17326
17327#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17328 void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
17329 {
17330 vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
17331 }
17332#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17333
17334#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17335 void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
17336 {
17337 vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
17338 }
17339#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17340
17341#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17342 void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
17343 {
17344 vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
17345 }
17346#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
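    // Illustrative usage sketch (hand-written): timing a stretch of GPU work with the query
    // commands above. commandBuffer, queryPool (a timestamp pool with at least two queries)
    // and resultBuffer (a buffer with transfer-dst usage) are assumed to exist already.
    //
    //   commandBuffer.resetQueryPool( queryPool, 0, 2 );
    //   commandBuffer.writeTimestamp( vk::PipelineStageFlagBits::eTopOfPipe, queryPool, 0 );
    //   // record the work to be timed here
    //   commandBuffer.writeTimestamp( vk::PipelineStageFlagBits::eBottomOfPipe, queryPool, 1 );
    //   commandBuffer.copyQueryPoolResults( queryPool, 0, 2, resultBuffer, 0, sizeof( uint64_t ),
    //                                       vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );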
17347
17348 void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
17349 {
17350 vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
17351 }
17352
17353#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17354 template <typename T>
17355 void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const
17356 {
17357 vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
17358 }
17359#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
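    // Illustrative usage sketch (hand-written): pushing a small, hypothetical per-draw structure
    // through the templated overload above. commandBuffer and pipelineLayout are assumed to be
    // valid handles; PushData is made up for the example.
    //
    //   struct PushData { float tint[4]; };
    //   PushData data = { { 1.0f, 0.5f, 0.25f, 1.0f } };
    //   commandBuffer.pushConstants<PushData>( pipelineLayout, vk::ShaderStageFlagBits::eFragment, 0, data );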
17360
17361 void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const
17362 {
17363 vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
17364 }
17365
17366#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17367 void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const
17368 {
17369 vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
17370 }
17371#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
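    // Illustrative usage sketch (hand-written): beginning a render pass through the reference
    // overload above. renderPass, framebuffer, width and height are assumed to come from earlier
    // setup code, and the render pass is assumed not to need clear values (clearValueCount stays 0).
    //
    //   vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer,
    //                                      vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( width, height ) ) );
    //   commandBuffer.beginRenderPass( beginInfo, vk::SubpassContents::eInline );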
17372
17373#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17374 void nextSubpass( SubpassContents contents ) const
17375 {
17376 vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
17377 }
17378#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17379
17380#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17381 void nextSubpass( SubpassContents contents ) const
17382 {
17383 vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
17384 }
17385#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17386
17387#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17388 void endRenderPass( ) const
17389 {
17390 vkCmdEndRenderPass( m_commandBuffer );
17391 }
17392#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17393
17394#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17395 void endRenderPass() const
17396 {
17397 vkCmdEndRenderPass( m_commandBuffer );
17398 }
17399#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17400
17401 void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
17402 {
17403 vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
17404 }
17405
17406#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17407 void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const
17408 {
17409 vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
17410 }
17411#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
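    // Illustrative usage sketch (hand-written): replaying previously recorded secondary command
    // buffers from a primary one via the ArrayProxy overload above. secondaryBuffers is assumed
    // to be a std::vector<vk::CommandBuffer> of fully recorded secondary command buffers, and the
    // primary command buffer is assumed to be in the recording state.
    //
    //   primaryCommandBuffer.executeCommands( secondaryBuffers );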
17412
17413 void debugMarkerBeginEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
17414 {
17415 vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
17416 }
17417
17418#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17419 DebugMarkerMarkerInfoEXT debugMarkerBeginEXT() const
17420 {
17421 DebugMarkerMarkerInfoEXT markerInfo;
17422 vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
17423 return markerInfo;
17424 }
17425#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17426
17427#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17428 void debugMarkerEndEXT( ) const
17429 {
17430 vkCmdDebugMarkerEndEXT( m_commandBuffer );
17431 }
17432#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17433
17434#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17435 void debugMarkerEndEXT() const
17436 {
17437 vkCmdDebugMarkerEndEXT( m_commandBuffer );
17438 }
17439#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17440
17441 void debugMarkerInsertEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
17442 {
17443 vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
17444 }
17445
17446#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17447 DebugMarkerMarkerInfoEXT debugMarkerInsertEXT() const
17448 {
17449 DebugMarkerMarkerInfoEXT markerInfo;
17450 vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
17451 return markerInfo;
17452 }
17453#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17454
17455#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17456 void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
17457 {
17458 vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
17459 }
17460#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17461
17462#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17463 void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
17464 {
17465 vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
17466 }
17467#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17468
17469#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17470 void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
17471 {
17472 vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
17473 }
17474#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17475
17476#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17477 void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
17478 {
17479 vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
17480 }
17481#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17482
17483 void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const
17484 {
17485 vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
17486 }
17487
17488#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17489 void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo ) const
17490 {
17491 vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
17492 }
17493#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17494
17495 void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const
17496 {
17497 vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
17498 }
17499
17500#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17501 void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo ) const
17502 {
17503 vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
17504 }
17505#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17506
17507#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
17508 explicit
17509#endif
17510 operator VkCommandBuffer() const
17511 {
17512 return m_commandBuffer;
17513 }
17514
17515 explicit operator bool() const
17516 {
17517 return m_commandBuffer != VK_NULL_HANDLE;
17518 }
17519
17520 bool operator!() const
17521 {
17522 return m_commandBuffer == VK_NULL_HANDLE;
17523 }
17524
17525 private:
17526 VkCommandBuffer m_commandBuffer;
17527 };
17528 static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
17529
17530 struct SubmitInfo
17531 {
17532 SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
17533 : sType( StructureType::eSubmitInfo )
17534 , pNext( nullptr )
17535 , waitSemaphoreCount( waitSemaphoreCount_ )
17536 , pWaitSemaphores( pWaitSemaphores_ )
17537 , pWaitDstStageMask( pWaitDstStageMask_ )
17538 , commandBufferCount( commandBufferCount_ )
17539 , pCommandBuffers( pCommandBuffers_ )
17540 , signalSemaphoreCount( signalSemaphoreCount_ )
17541 , pSignalSemaphores( pSignalSemaphores_ )
17542 {
17543 }
17544
17545 SubmitInfo( VkSubmitInfo const & rhs )
17546 {
17547 memcpy( this, &rhs, sizeof(SubmitInfo) );
17548 }
17549
17550 SubmitInfo& operator=( VkSubmitInfo const & rhs )
17551 {
17552 memcpy( this, &rhs, sizeof(SubmitInfo) );
17553 return *this;
17554 }
17555
17556 SubmitInfo& setSType( StructureType sType_ )
17557 {
17558 sType = sType_;
17559 return *this;
17560 }
17561
17562 SubmitInfo& setPNext( const void* pNext_ )
17563 {
17564 pNext = pNext_;
17565 return *this;
17566 }
17567
17568 SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
17569 {
17570 waitSemaphoreCount = waitSemaphoreCount_;
17571 return *this;
17572 }
17573
17574 SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
17575 {
17576 pWaitSemaphores = pWaitSemaphores_;
17577 return *this;
17578 }
17579
17580 SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
17581 {
17582 pWaitDstStageMask = pWaitDstStageMask_;
17583 return *this;
17584 }
17585
17586 SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
17587 {
17588 commandBufferCount = commandBufferCount_;
17589 return *this;
17590 }
17591
17592 SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
17593 {
17594 pCommandBuffers = pCommandBuffers_;
17595 return *this;
17596 }
17597
17598 SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
17599 {
17600 signalSemaphoreCount = signalSemaphoreCount_;
17601 return *this;
17602 }
17603
17604 SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
17605 {
17606 pSignalSemaphores = pSignalSemaphores_;
17607 return *this;
17608 }
17609
17610 operator const VkSubmitInfo&() const
17611 {
17612 return *reinterpret_cast<const VkSubmitInfo*>(this);
17613 }
17614
17615 bool operator==( SubmitInfo const& rhs ) const
17616 {
17617 return ( sType == rhs.sType )
17618 && ( pNext == rhs.pNext )
17619 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
17620 && ( pWaitSemaphores == rhs.pWaitSemaphores )
17621 && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
17622 && ( commandBufferCount == rhs.commandBufferCount )
17623 && ( pCommandBuffers == rhs.pCommandBuffers )
17624 && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
17625 && ( pSignalSemaphores == rhs.pSignalSemaphores );
17626 }
17627
17628 bool operator!=( SubmitInfo const& rhs ) const
17629 {
17630 return !operator==( rhs );
17631 }
17632
17633 private:
17634 StructureType sType;
17635
17636 public:
17637 const void* pNext;
17638 uint32_t waitSemaphoreCount;
17639 const Semaphore* pWaitSemaphores;
17640 const PipelineStageFlags* pWaitDstStageMask;
17641 uint32_t commandBufferCount;
17642 const CommandBuffer* pCommandBuffers;
17643 uint32_t signalSemaphoreCount;
17644 const Semaphore* pSignalSemaphores;
17645 };
17646 static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
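  // Illustrative usage sketch (hand-written): filling a SubmitInfo with the chained setters above.
  // commandBuffer, imageAcquiredSemaphore and renderCompleteSemaphore are assumed to be valid
  // handles created elsewhere.
  //
  //   vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
  //   vk::SubmitInfo submitInfo;
  //   submitInfo.setWaitSemaphoreCount( 1 )
  //             .setPWaitSemaphores( &imageAcquiredSemaphore )
  //             .setPWaitDstStageMask( &waitStage )
  //             .setCommandBufferCount( 1 )
  //             .setPCommandBuffers( &commandBuffer )
  //             .setSignalSemaphoreCount( 1 )
  //             .setPSignalSemaphores( &renderCompleteSemaphore );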
17647
17648 class Queue
17649 {
17650 public:
17651 Queue()
17652 : m_queue(VK_NULL_HANDLE)
17653 {}
17654
17655#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
17656 Queue(VkQueue queue)
17657 : m_queue(queue)
17658 {}
17659
17660 Queue& operator=(VkQueue queue)
17661 {
17662 m_queue = queue;
17663 return *this;
17664 }
17665#endif
17666
17667 bool operator==(Queue const &rhs) const
17668 {
17669 return m_queue == rhs.m_queue;
17670 }
17671
17672 bool operator!=(Queue const &rhs) const
17673 {
17674 return m_queue != rhs.m_queue;
17675 }
17676
17677 bool operator<(Queue const &rhs) const
17678 {
17679 return m_queue < rhs.m_queue;
17680 }
17681
17682 Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const
17683 {
17684 return static_cast<Result>( vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
17685 }
17686
17687#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17688 ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const
17689 {
17690 Result result = static_cast<Result>( vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
17691 return createResultValue( result, "vk::Queue::submit" );
17692 }
17693#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17694
17695#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17696 Result waitIdle( ) const
17697 {
17698 return static_cast<Result>( vkQueueWaitIdle( m_queue ) );
17699 }
17700#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17701
17702#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17703 ResultValueType<void>::type waitIdle() const
17704 {
17705 Result result = static_cast<Result>( vkQueueWaitIdle( m_queue ) );
17706 return createResultValue( result, "vk::Queue::waitIdle" );
17707 }
17708#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17709
17710 Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const
17711 {
17712 return static_cast<Result>( vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
17713 }
17714
17715#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17716 ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const
17717 {
17718 Result result = static_cast<Result>( vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
17719 return createResultValue( result, "vk::Queue::bindSparse" );
17720 }
17721#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17722
17723 Result presentKHR( const PresentInfoKHR* pPresentInfo ) const
17724 {
17725 return static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
17726 }
17727
17728#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17729 Result presentKHR( const PresentInfoKHR & presentInfo ) const
17730 {
17731 Result result = static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
17732 return createResultValue( result, "vk::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
17733 }
17734#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17735
17736#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
17737 explicit
17738#endif
17739 operator VkQueue() const
17740 {
17741 return m_queue;
17742 }
17743
17744 explicit operator bool() const
17745 {
17746 return m_queue != VK_NULL_HANDLE;
17747 }
17748
17749 bool operator!() const
17750 {
17751 return m_queue == VK_NULL_HANDLE;
17752 }
17753
17754 private:
17755 VkQueue m_queue;
17756 };
17757 static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
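  // Illustrative usage sketch (hand-written): submitting work and draining the queue with the
  // enhanced-mode overloads above. queue is assumed to come from Device::getQueue() and submitInfo
  // from the SubmitInfo sketch above; a null fence is passed because no CPU-side signalling is
  // needed here. In the default, exception-enabled configuration a failed submit throws a
  // std::system_error instead of returning an error code.
  //
  //   queue.submit( submitInfo, vk::Fence() );
  //   queue.waitIdle();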
17758
17759 class Device
17760 {
17761 public:
17762 Device()
17763 : m_device(VK_NULL_HANDLE)
17764 {}
17765
17766#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
17767 Device(VkDevice device)
17768 : m_device(device)
17769 {}
17770
17771 Device& operator=(VkDevice device)
17772 {
17773 m_device = device;
17774 return *this;
17775 }
17776#endif
17777
17778 bool operator==(Device const &rhs) const
17779 {
17780 return m_device == rhs.m_device;
17781 }
17782
17783 bool operator!=(Device const &rhs) const
17784 {
17785 return m_device != rhs.m_device;
17786 }
17787
17788 bool operator<(Device const &rhs) const
17789 {
17790 return m_device < rhs.m_device;
17791 }
17792
17793 PFN_vkVoidFunction getProcAddr( const char* pName ) const
17794 {
17795 return vkGetDeviceProcAddr( m_device, pName );
17796 }
17797
17798#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17799 PFN_vkVoidFunction getProcAddr( const std::string & name ) const
17800 {
17801 return vkGetDeviceProcAddr( m_device, name.c_str() );
17802 }
17803#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17804
17805 void destroy( const AllocationCallbacks* pAllocator ) const
17806 {
17807 vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
17808 }
17809
17810#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17811 void destroy( Optional<const AllocationCallbacks> allocator = nullptr ) const
17812 {
17813 vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
17814 }
17815#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17816
17817 void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue ) const
17818 {
17819 vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
17820 }
17821
17822#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17823 Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const
17824 {
17825 Queue queue;
17826 vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
17827 return queue;
17828 }
17829#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17830
17831#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17832 Result waitIdle( ) const
17833 {
17834 return static_cast<Result>( vkDeviceWaitIdle( m_device ) );
17835 }
17836#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17837
17838#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17839 ResultValueType<void>::type waitIdle() const
17840 {
17841 Result result = static_cast<Result>( vkDeviceWaitIdle( m_device ) );
17842 return createResultValue( result, "vk::Device::waitIdle" );
17843 }
17844#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17845
17846 Result allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory ) const
17847 {
17848 return static_cast<Result>( vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
17849 }
17850
17851#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17852 ResultValueType<DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
17853 {
17854 DeviceMemory memory;
17855 Result result = static_cast<Result>( vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
17856 return createResultValue( result, memory, "vk::Device::allocateMemory" );
17857 }
17858#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17859
17860 void freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator ) const
17861 {
17862 vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
17863 }
17864
17865#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17866 void freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr ) const
17867 {
17868 vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
17869 }
17870#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17871
17872#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17873 Result mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData ) const
17874 {
17875 return static_cast<Result>( vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), ppData ) );
17876 }
17877#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17878
17879#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17880 ResultValueType<void*>::type mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags = MemoryMapFlags() ) const
17881 {
17882 void* pData;
17883 Result result = static_cast<Result>( vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), &pData ) );
17884 return createResultValue( result, pData, "vk::Device::mapMemory" );
17885 }
17886#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17887
17888#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17889 void unmapMemory( DeviceMemory memory ) const
17890 {
17891 vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
17892 }
17893#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17894
17895#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17896 void unmapMemory( DeviceMemory memory ) const
17897 {
17898 vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
17899 }
17900#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
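    // Illustrative usage sketch (hand-written): allocating host-visible memory and copying data
    // into it with allocateMemory / mapMemory / unmapMemory above. allocInfo is assumed to be a
    // MemoryAllocateInfo whose memoryTypeIndex selects a host-visible, host-coherent memory type,
    // and bytes / byteCount describe the source data.
    //
    //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
    //   void* mapped = device.mapMemory( memory, 0, allocInfo.allocationSize );
    //   memcpy( mapped, bytes, byteCount );
    //   device.unmapMemory( memory );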
17901
17902 Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const
17903 {
17904 return static_cast<Result>( vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
17905 }
17906
17907#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17908 ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const
17909 {
17910 Result result = static_cast<Result>( vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
17911 return createResultValue( result, "vk::Device::flushMappedMemoryRanges" );
17912 }
17913#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17914
17915 Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const
17916 {
17917 return static_cast<Result>( vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
17918 }
17919
17920#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17921 ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const
17922 {
17923 Result result = static_cast<Result>( vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
17924 return createResultValue( result, "vk::Device::invalidateMappedMemoryRanges" );
17925 }
17926#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17927
17928 void getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes ) const
17929 {
17930 vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), pCommittedMemoryInBytes );
17931 }
17932
17933#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17934 DeviceSize getMemoryCommitment( DeviceMemory memory ) const
17935 {
17936 DeviceSize committedMemoryInBytes;
17937 vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), &committedMemoryInBytes );
17938 return committedMemoryInBytes;
17939 }
17940#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17941
17942 void getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements ) const
17943 {
17944 vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
17945 }
17946
17947#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17948 MemoryRequirements getBufferMemoryRequirements( Buffer buffer ) const
17949 {
17950 MemoryRequirements memoryRequirements;
17951 vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
17952 return memoryRequirements;
17953 }
17954#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17955
17956#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17957 Result bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const
17958 {
17959 return static_cast<Result>( vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
17960 }
17961#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17962
17963#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17964 ResultValueType<void>::type bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const
17965 {
17966 Result result = static_cast<Result>( vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
17967 return createResultValue( result, "vk::Device::bindBufferMemory" );
17968 }
17969#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17970
17971 void getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements ) const
17972 {
17973 vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
17974 }
17975
17976#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17977 MemoryRequirements getImageMemoryRequirements( Image image ) const
17978 {
17979 MemoryRequirements memoryRequirements;
17980 vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
17981 return memoryRequirements;
17982 }
17983#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17984
17985#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17986 Result bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const
17987 {
17988 return static_cast<Result>( vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
17989 }
17990#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17991
17992#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17993 ResultValueType<void>::type bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const
17994 {
17995 Result result = static_cast<Result>( vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
17996 return createResultValue( result, "vk::Device::bindImageMemory" );
17997 }
17998#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17999
18000 void getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements ) const
18001 {
18002 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
18003 }
18004
18005#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18006 template <typename Allocator = std::allocator<SparseImageMemoryRequirements>>
18007 std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( Image image ) const
18008 {
18009 std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
18010 uint32_t sparseMemoryRequirementCount;
18011 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
18012 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
18013 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
18014 return sparseMemoryRequirements;
18015 }
18016#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18017
18018 Result createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
18019 {
18020 return static_cast<Result>( vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
18021 }
18022
18023#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18024 ResultValueType<Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18025 {
18026 Fence fence;
18027 Result result = static_cast<Result>( vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkFence*>( &fence ) ) );
18028 return createResultValue( result, fence, "vk::Device::createFence" );
18029 }
18030#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18031
18032 void destroyFence( Fence fence, const AllocationCallbacks* pAllocator ) const
18033 {
18034 vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18035 }
18036
18037#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18038 void destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator = nullptr ) const
18039 {
18040 vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18041 }
18042#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18043
18044 Result resetFences( uint32_t fenceCount, const Fence* pFences ) const
18045 {
18046 return static_cast<Result>( vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
18047 }
18048
18049#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18050 ResultValueType<void>::type resetFences( ArrayProxy<const Fence> fences ) const
18051 {
18052 Result result = static_cast<Result>( vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
18053 return createResultValue( result, "vk::Device::resetFences" );
18054 }
18055#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18056
18057#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18058 Result getFenceStatus( Fence fence ) const
18059 {
18060 return static_cast<Result>( vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
18061 }
18062#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18063
18064#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18065 Result getFenceStatus( Fence fence ) const
18066 {
18067 Result result = static_cast<Result>( vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
18068 return createResultValue( result, "vk::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
18069 }
18070#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18071
18072 Result waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout ) const
18073 {
18074 return static_cast<Result>( vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), waitAll, timeout ) );
18075 }
18076
18077#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18078 Result waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout ) const
18079 {
18080 Result result = static_cast<Result>( vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), waitAll, timeout ) );
18081 return createResultValue( result, "vk::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
18082 }
18083#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
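    // Illustrative usage sketch (hand-written): creating a fence, handing it to a queue submission
    // and waiting on it with the overloads above. device, queue and submitInfo are assumed to be
    // set up elsewhere; the Result returned by waitForFences (eSuccess or eTimeout) is ignored here.
    //
    //   vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
    //   queue.submit( submitInfo, fence );
    //   device.waitForFences( fence, VK_TRUE, UINT64_MAX );
    //   device.resetFences( fence );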
18084
18085 Result createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore ) const
18086 {
18087 return static_cast<Result>( vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
18088 }
18089
18090#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18091 ResultValueType<Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18092 {
18093 Semaphore semaphore;
18094 Result result = static_cast<Result>( vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
18095 return createResultValue( result, semaphore, "vk::Device::createSemaphore" );
18096 }
18097#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18098
18099 void destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator ) const
18100 {
18101 vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18102 }
18103
18104#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18105 void destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr ) const
18106 {
18107 vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18108 }
18109#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18110
18111 Result createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent ) const
18112 {
18113 return static_cast<Result>( vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
18114 }
18115
18116#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18117 ResultValueType<Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18118 {
18119 Event event;
18120 Result result = static_cast<Result>( vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkEvent*>( &event ) ) );
18121 return createResultValue( result, event, "vk::Device::createEvent" );
18122 }
18123#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18124
18125 void destroyEvent( Event event, const AllocationCallbacks* pAllocator ) const
18126 {
18127 vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18128 }
18129
18130#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18131 void destroyEvent( Event event, Optional<const AllocationCallbacks> allocator = nullptr ) const
18132 {
18133 vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18134 }
18135#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18136
18137#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18138 Result getEventStatus( Event event ) const
18139 {
18140 return static_cast<Result>( vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
18141 }
18142#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18143
18144#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18145 Result getEventStatus( Event event ) const
18146 {
18147 Result result = static_cast<Result>( vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
18148 return createResultValue( result, "vk::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
18149 }
18150#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18151
18152#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18153 Result setEvent( Event event ) const
18154 {
18155 return static_cast<Result>( vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
18156 }
18157#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18158
18159#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18160 ResultValueType<void>::type setEvent( Event event ) const
18161 {
18162 Result result = static_cast<Result>( vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
18163 return createResultValue( result, "vk::Device::setEvent" );
18164 }
18165#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18166
18167#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18168 Result resetEvent( Event event ) const
18169 {
18170 return static_cast<Result>( vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
18171 }
18172#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18173
18174#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18175 ResultValueType<void>::type resetEvent( Event event ) const
18176 {
18177 Result result = static_cast<Result>( vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
18178 return createResultValue( result, "vk::Device::resetEvent" );
18179 }
18180#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18181
18182 Result createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool ) const
18183 {
18184 return static_cast<Result>( vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
18185 }
18186
18187#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18188 ResultValueType<QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18189 {
18190 QueryPool queryPool;
18191 Result result = static_cast<Result>( vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
18192 return createResultValue( result, queryPool, "vk::Device::createQueryPool" );
18193 }
18194#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18195
18196 void destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator ) const
18197 {
18198 vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18199 }
18200
18201#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18202 void destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr ) const
18203 {
18204 vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18205 }
18206#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18207
18208 Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags ) const
18209 {
18210 return static_cast<Result>( vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, stride, static_cast<VkQueryResultFlags>( flags ) ) );
18211 }
18212
18213#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18214 template <typename T>
18215 Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags ) const
18216 {
18217 Result result = static_cast<Result>( vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), stride, static_cast<VkQueryResultFlags>( flags ) ) );
18218 return createResultValue( result, "vk::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
18219 }
18220#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
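    // Illustrative usage sketch (hand-written): reading back two timestamp queries through the
    // templated overload above. queryPool is assumed to be a timestamp pool whose queries 0 and 1
    // have been written on the GPU.
    //
    //   std::array<uint64_t, 2> timestamps;
    //   device.getQueryPoolResults<uint64_t>( queryPool, 0, 2, timestamps, sizeof( uint64_t ),
    //                                         vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );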
18221
18222 Result createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer ) const
18223 {
18224 return static_cast<Result>( vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
18225 }
18226
18227#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18228 ResultValueType<Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18229 {
18230 Buffer buffer;
18231 Result result = static_cast<Result>( vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkBuffer*>( &buffer ) ) );
18232 return createResultValue( result, buffer, "vk::Device::createBuffer" );
18233 }
18234#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18235
18236 void destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator ) const
18237 {
18238 vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18239 }
18240
18241#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18242 void destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr ) const
18243 {
18244 vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18245 }
18246#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18247
18248 Result createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView ) const
18249 {
18250 return static_cast<Result>( vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
18251 }
18252
18253#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18254 ResultValueType<BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18255 {
18256 BufferView view;
18257 Result result = static_cast<Result>( vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkBufferView*>( &view ) ) );
18258 return createResultValue( result, view, "vk::Device::createBufferView" );
18259 }
18260#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18261
18262 void destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator ) const
18263 {
18264 vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18265 }
18266
18267#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18268 void destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr ) const
18269 {
18270 vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18271 }
18272#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18273
18274 Result createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage ) const
18275 {
18276 return static_cast<Result>( vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
18277 }
18278
18279#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18280 ResultValueType<Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18281 {
18282 Image image;
18283 Result result = static_cast<Result>( vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkImage*>( &image ) ) );
18284 return createResultValue( result, image, "vk::Device::createImage" );
18285 }
18286#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18287
18288 void destroyImage( Image image, const AllocationCallbacks* pAllocator ) const
18289 {
18290 vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18291 }
18292
18293#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18294 void destroyImage( Image image, Optional<const AllocationCallbacks> allocator = nullptr ) const
18295 {
18296 vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18297 }
18298#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18299
18300 void getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout ) const
18301 {
18302 vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
18303 }
18304
18305#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18306 SubresourceLayout getImageSubresourceLayout( Image image, const ImageSubresource & subresource ) const
18307 {
18308 SubresourceLayout layout;
18309 vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
18310 return layout;
18311 }
18312#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18313
18314 Result createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView ) const
18315 {
18316 return static_cast<Result>( vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
18317 }
18318
18319#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18320 ResultValueType<ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18321 {
18322 ImageView view;
18323 Result result = static_cast<Result>( vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkImageView*>( &view ) ) );
18324 return createResultValue( result, view, "vk::Device::createImageView" );
18325 }
18326#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18327
18328 void destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator ) const
18329 {
18330 vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18331 }
18332
18333#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18334 void destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr ) const
18335 {
18336 vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18337 }
18338#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18339
18340 Result createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule ) const
18341 {
18342 return static_cast<Result>( vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
18343 }
18344
18345#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18346 ResultValueType<ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18347 {
18348 ShaderModule shaderModule;
18349 Result result = static_cast<Result>( vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
18350 return createResultValue( result, shaderModule, "vk::Device::createShaderModule" );
18351 }
18352#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18353
18354 void destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator ) const
18355 {
18356 vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18357 }
18358
18359#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18360 void destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr ) const
18361 {
18362 vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18363 }
18364#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18365
18366 Result createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache ) const
18367 {
18368 return static_cast<Result>( vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
18369 }
18370
18371#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18372 ResultValueType<PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18373 {
18374 PipelineCache pipelineCache;
18375 Result result = static_cast<Result>( vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
18376 return createResultValue( result, pipelineCache, "vk::Device::createPipelineCache" );
18377 }
18378#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18379
18380 void destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator ) const
18381 {
18382 vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18383 }
18384
18385#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18386 void destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr ) const
18387 {
18388 vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18389 }
18390#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18391
18392 Result getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData ) const
18393 {
18394 return static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
18395 }
18396
18397#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18398 template <typename Allocator = std::allocator<uint8_t>>
18399 typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( PipelineCache pipelineCache ) const
18400 {
18401 std::vector<uint8_t,Allocator> data;
18402 size_t dataSize;
18403 Result result;
18404 do
18405 {
18406 result = static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
18407 if ( ( result == Result::eSuccess ) && dataSize )
18408 {
18409 data.resize( dataSize );
18410 result = static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
18411 }
18412 } while ( result == Result::eIncomplete );
18413 assert( dataSize <= data.size() );
18414 data.resize( dataSize );
18415 return createResultValue( result, data, "vk::Device::getPipelineCacheData" );
18416 }
18417#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
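    // The enhanced getPipelineCacheData overload above hides the usual two-call size query:
    // it retries while the driver reports Result::eIncomplete and trims the vector to the
    // number of bytes actually written. A minimal sketch, assuming a valid vk::Device `device`,
    // a vk::PipelineCache `cache`, and the default exception-based error handling:
    //   std::vector<uint8_t> blob = device.getPipelineCacheData( cache );
    //   // `blob` can later be fed back through PipelineCacheCreateInfo::pInitialData.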
18418
18419 Result mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches ) const
18420 {
18421 return static_cast<Result>( vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
18422 }
18423
18424#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18425 ResultValueType<void>::type mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches ) const
18426 {
18427 Result result = static_cast<Result>( vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
18428 return createResultValue( result, "vk::Device::mergePipelineCaches" );
18429 }
18430#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18431
18432 Result createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const
18433 {
18434 return static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
18435 }
18436
18437#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18438 template <typename Allocator = std::allocator<Pipeline>>
18439 typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const
18440 {
18441 std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
18442 Result result = static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
18443 return createResultValue( result, pipelines, "vk::Device::createGraphicsPipelines" );
18444 }
18445
18446 ResultValueType<Pipeline>::type createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18447 {
18448 Pipeline pipeline;
18449 Result result = static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
18450 return createResultValue( result, pipeline, "vk::Device::createGraphicsPipeline" );
18451 }
18452#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
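    // createGraphicsPipelines takes its create infos through ArrayProxy, so a container such as
    // std::vector or std::array (or a single element) can be passed without spelling out a
    // count/pointer pair; the singular createGraphicsPipeline above is a convenience wrapper that
    // forwards one create info and unwraps the single resulting pipeline. A minimal sketch,
    // assuming a valid `device`, `cache` and an already filled GraphicsPipelineCreateInfo `ci`:
    //   vk::Pipeline pipeline = device.createGraphicsPipeline( cache, ci );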
18453
18454 Result createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const
18455 {
18456 return static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
18457 }
18458
18459#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18460 template <typename Allocator = std::allocator<Pipeline>>
18461 typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const
18462 {
18463 std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
18464 Result result = static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
18465 return createResultValue( result, pipelines, "vk::Device::createComputePipelines" );
18466 }
18467
18468 ResultValueType<Pipeline>::type createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18469 {
18470 Pipeline pipeline;
18471 Result result = static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
18472 return createResultValue( result, pipeline, "vk::Device::createComputePipeline" );
18473 }
18474#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18475
18476 void destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator ) const
18477 {
18478 vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18479 }
18480
18481#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18482 void destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr ) const
18483 {
18484 vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18485 }
18486#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18487
18488 Result createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout ) const
18489 {
18490 return static_cast<Result>( vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
18491 }
18492
18493#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18494 ResultValueType<PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18495 {
18496 PipelineLayout pipelineLayout;
18497 Result result = static_cast<Result>( vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
18498 return createResultValue( result, pipelineLayout, "vk::Device::createPipelineLayout" );
18499 }
18500#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18501
18502 void destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator ) const
18503 {
18504 vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18505 }
18506
18507#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18508 void destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const
18509 {
18510 vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18511 }
18512#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18513
18514 Result createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler ) const
18515 {
18516 return static_cast<Result>( vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
18517 }
18518
18519#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18520 ResultValueType<Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18521 {
18522 Sampler sampler;
18523 Result result = static_cast<Result>( vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSampler*>( &sampler ) ) );
18524 return createResultValue( result, sampler, "vk::Device::createSampler" );
18525 }
18526#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18527
18528 void destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator ) const
18529 {
18530 vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18531 }
18532
18533#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18534 void destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr ) const
18535 {
18536 vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18537 }
18538#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18539
18540 Result createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout ) const
18541 {
18542 return static_cast<Result>( vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
18543 }
18544
18545#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18546 ResultValueType<DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18547 {
18548 DescriptorSetLayout setLayout;
18549 Result result = static_cast<Result>( vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
18550 return createResultValue( result, setLayout, "vk::Device::createDescriptorSetLayout" );
18551 }
18552#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18553
18554 void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator ) const
18555 {
18556 vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18557 }
18558
18559#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18560 void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const
18561 {
18562 vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18563 }
18564#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18565
18566 Result createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool ) const
18567 {
18568 return static_cast<Result>( vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
18569 }
18570
18571#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18572 ResultValueType<DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18573 {
18574 DescriptorPool descriptorPool;
18575 Result result = static_cast<Result>( vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
18576 return createResultValue( result, descriptorPool, "vk::Device::createDescriptorPool" );
18577 }
18578#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18579
18580 void destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator ) const
18581 {
18582 vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18583 }
18584
18585#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18586 void destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr ) const
18587 {
18588 vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18589 }
18590#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18591
18592#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18593 Result resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags ) const
18594 {
18595 return static_cast<Result>( vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
18596 }
18597#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18598
18599#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18600 ResultValueType<void>::type resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags() ) const
18601 {
18602 Result result = static_cast<Result>( vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
18603 return createResultValue( result, "vk::Device::resetDescriptorPool" );
18604 }
18605#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18606
18607 Result allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets ) const
18608 {
18609 return static_cast<Result>( vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
18610 }
18611
18612#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18613 template <typename Allocator = std::allocator<DescriptorSet>>
18614 typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo ) const
18615 {
18616 std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
18617 Result result = static_cast<Result>( vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
18618 return createResultValue( result, descriptorSets, "vk::Device::allocateDescriptorSets" );
18619 }
18620#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
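    // The enhanced allocateDescriptorSets sizes its result vector from
    // allocateInfo.descriptorSetCount, so the caller only fills in a DescriptorSetAllocateInfo.
    // A minimal sketch, assuming a valid `device`, `descriptorPool` and `setLayout`, and the
    // generated struct constructor taking (descriptorPool, descriptorSetCount, pSetLayouts):
    //   vk::DescriptorSetAllocateInfo info( descriptorPool, 1, &setLayout );
    //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( info );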
18621
18622 Result freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets ) const
18623 {
18624 return static_cast<Result>( vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
18625 }
18626
18627#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18628 ResultValueType<void>::type freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets ) const
18629 {
18630 Result result = static_cast<Result>( vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
18631 return createResultValue( result, "vk::Device::freeDescriptorSets" );
18632 }
18633#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18634
18635 void updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies ) const
18636 {
18637 vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
18638 }
18639
18640#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18641 void updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies ) const
18642 {
18643 vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
18644 }
18645#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
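    // updateDescriptorSets has no result to report; the enhanced overload simply forwards two
    // ArrayProxy ranges, so descriptor writes and copies can be passed independently and either
    // range may be empty.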
18646
18647 Result createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer ) const
18648 {
18649 return static_cast<Result>( vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
18650 }
18651
18652#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18653 ResultValueType<Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18654 {
18655 Framebuffer framebuffer;
18656 Result result = static_cast<Result>( vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
18657 return createResultValue( result, framebuffer, "vk::Device::createFramebuffer" );
18658 }
18659#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18660
18661 void destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator ) const
18662 {
18663 vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18664 }
18665
18666#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18667 void destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr ) const
18668 {
18669 vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18670 }
18671#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18672
18673 Result createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass ) const
18674 {
18675 return static_cast<Result>( vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
18676 }
18677
18678#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18679 ResultValueType<RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18680 {
18681 RenderPass renderPass;
18682 Result result = static_cast<Result>( vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
18683 return createResultValue( result, renderPass, "vk::Device::createRenderPass" );
18684 }
18685#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18686
18687 void destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator ) const
18688 {
18689 vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18690 }
18691
18692#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18693 void destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr ) const
18694 {
18695 vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18696 }
18697#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18698
18699 void getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity ) const
18700 {
18701 vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
18702 }
18703
18704#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18705 Extent2D getRenderAreaGranularity( RenderPass renderPass ) const
18706 {
18707 Extent2D granularity;
18708 vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
18709 return granularity;
18710 }
18711#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18712
18713 Result createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool ) const
18714 {
18715 return static_cast<Result>( vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
18716 }
18717
18718#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18719 ResultValueType<CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18720 {
18721 CommandPool commandPool;
18722 Result result = static_cast<Result>( vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
18723 return createResultValue( result, commandPool, "vk::Device::createCommandPool" );
18724 }
18725#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18726
18727 void destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator ) const
18728 {
18729 vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18730 }
18731
18732#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18733 void destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr ) const
18734 {
18735 vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18736 }
18737#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18738
18739#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18740 Result resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const
18741 {
18742 return static_cast<Result>( vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
18743 }
18744#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18745
18746#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18747 ResultValueType<void>::type resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const
18748 {
18749 Result result = static_cast<Result>( vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
18750 return createResultValue( result, "vk::Device::resetCommandPool" );
18751 }
18752#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18753
18754 Result allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers ) const
18755 {
18756 return static_cast<Result>( vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
18757 }
18758
18759#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18760 template <typename Allocator = std::allocator<CommandBuffer>>
18761 typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo ) const
18762 {
18763 std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
18764 Result result = static_cast<Result>( vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
18765 return createResultValue( result, commandBuffers, "vk::Device::allocateCommandBuffers" );
18766 }
18767#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
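    // Like allocateDescriptorSets, the enhanced allocateCommandBuffers sizes its result from
    // allocateInfo.commandBufferCount. A minimal sketch, assuming a valid `device` and
    // `commandPool`, and the generated struct constructor taking (pool, level, count):
    //   vk::CommandBufferAllocateInfo info( commandPool, vk::CommandBufferLevel::ePrimary, 1 );
    //   std::vector<vk::CommandBuffer> buffers = device.allocateCommandBuffers( info );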
18768
18769 void freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
18770 {
18771 vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
18772 }
18773
18774#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18775 void freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers ) const
18776 {
18777 vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
18778 }
18779#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18780
18781 Result createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains ) const
18782 {
18783 return static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
18784 }
18785
18786#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18787 template <typename Allocator = std::allocator<SwapchainKHR>>
18788 typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const
18789 {
18790 std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
18791 Result result = static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
18792 return createResultValue( result, swapchains, "vk::Device::createSharedSwapchainsKHR" );
18793 }
18794
18795 ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18796 {
18797 SwapchainKHR swapchain;
18798 Result result = static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
18799 return createResultValue( result, swapchain, "vk::Device::createSharedSwapchainKHR" );
18800 }
18801#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18802
18803 Result createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain ) const
18804 {
18805 return static_cast<Result>( vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
18806 }
18807
18808#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18809 ResultValueType<SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18810 {
18811 SwapchainKHR swapchain;
18812 Result result = static_cast<Result>( vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
18813 return createResultValue( result, swapchain, "vk::Device::createSwapchainKHR" );
18814 }
18815#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18816
18817 void destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator ) const
18818 {
18819 vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18820 }
18821
18822#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18823 void destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr ) const
18824 {
18825 vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18826 }
18827#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18828
18829 Result getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages ) const
18830 {
18831 return static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
18832 }
18833
18834#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18835 template <typename Allocator = std::allocator<Image>>
18836 typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( SwapchainKHR swapchain ) const
18837 {
18838 std::vector<Image,Allocator> swapchainImages;
18839 uint32_t swapchainImageCount;
18840 Result result;
18841 do
18842 {
18843 result = static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
18844 if ( ( result == Result::eSuccess ) && swapchainImageCount )
18845 {
18846 swapchainImages.resize( swapchainImageCount );
18847 result = static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
18848 }
18849 } while ( result == Result::eIncomplete );
18850 assert( swapchainImageCount <= swapchainImages.size() );
18851 swapchainImages.resize( swapchainImageCount );
18852 return createResultValue( result, swapchainImages, "vk::Device::getSwapchainImagesKHR" );
18853 }
18854#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18855
18856 Result acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex ) const
18857 {
18858 return static_cast<Result>( vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
18859 }
18860
18861#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18862 ResultValue<uint32_t> acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence ) const
18863 {
18864 uint32_t imageIndex;
18865 Result result = static_cast<Result>( vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
18866 return createResultValue( result, imageIndex, "vk::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
18867 }
18868#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
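    // acquireNextImageKHR legitimately returns several non-error codes (eTimeout, eNotReady and
    // eSuboptimalKHR in addition to eSuccess), so the enhanced overload returns a
    // ResultValue<uint32_t> instead of unwrapping the value: the caller inspects both fields.
    // A minimal sketch, assuming a valid `device`, `swapchain` and `imageAvailableSemaphore`:
    //   vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, vk::Fence() );
    //   if ( acquired.result == vk::Result::eSuccess ) { uint32_t imageIndex = acquired.value; /* render to that image */ }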
18869
18870 Result debugMarkerSetObjectNameEXT( DebugMarkerObjectNameInfoEXT* pNameInfo ) const
18871 {
18872 return static_cast<Result>( vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
18873 }
18874
18875#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18876    ResultValueType<void>::type debugMarkerSetObjectNameEXT( DebugMarkerObjectNameInfoEXT & nameInfo ) const
18877    {
18878      // nameInfo is an input that the caller fills in (object type, handle and name) before the call.
18879      Result result = static_cast<Result>( vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
18880      return createResultValue( result, "vk::Device::debugMarkerSetObjectNameEXT" );
18881    }
18882#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18883
18884 Result debugMarkerSetObjectTagEXT( DebugMarkerObjectTagInfoEXT* pTagInfo ) const
18885 {
18886 return static_cast<Result>( vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
18887 }
18888
18889#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18890    ResultValueType<void>::type debugMarkerSetObjectTagEXT( DebugMarkerObjectTagInfoEXT & tagInfo ) const
18891    {
18892      // tagInfo is an input that the caller fills in (object handle, tag name and tag data) before the call.
18893      Result result = static_cast<Result>( vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
18894      return createResultValue( result, "vk::Device::debugMarkerSetObjectTagEXT" );
18895    }
18896#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18897
18898#ifdef VK_USE_PLATFORM_WIN32_KHR
18899 Result getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const
18900 {
18901 return static_cast<Result>( vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
18902 }
18903#endif /*VK_USE_PLATFORM_WIN32_KHR*/
18904
18905#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18906#ifdef VK_USE_PLATFORM_WIN32_KHR
18907 ResultValueType<HANDLE>::type getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType ) const
18908 {
18909 HANDLE handle;
18910 Result result = static_cast<Result>( vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
18911 return createResultValue( result, handle, "vk::Device::getMemoryWin32HandleNV" );
18912 }
18913#endif /*VK_USE_PLATFORM_WIN32_KHR*/
18914#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18915
18916    Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const
18917 {
18918 return static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
18919 }
18920
18921#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18922 ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18923 {
18924 IndirectCommandsLayoutNVX indirectCommandsLayout;
18925 Result result = static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
18926 return createResultValue( result, indirectCommandsLayout, "vk::Device::createIndirectCommandsLayoutNVX" );
18927 }
18928#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18929
18930 void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator ) const
18931 {
18932 vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18933 }
18934
18935#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18936 void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const
18937 {
18938 vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18939 }
18940#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18941
18942 Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable ) const
18943 {
18944 return static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
18945 }
18946
18947#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18948 ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18949 {
18950 ObjectTableNVX objectTable;
18951 Result result = static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
18952 return createResultValue( result, objectTable, "vk::Device::createObjectTableNVX" );
18953 }
18954#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18955
18956 void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator ) const
18957 {
18958 vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18959 }
18960
18961#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18962 void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr ) const
18963 {
18964 vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18965 }
18966#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18967
18968 Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const
18969 {
18970 return static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
18971 }
18972
18973#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18974 ResultValueType<void>::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices ) const
18975 {
18976#ifdef VULKAN_HPP_NO_EXCEPTIONS
18977 assert( pObjectTableEntries.size() == objectIndices.size() );
18978#else
18979 if ( pObjectTableEntries.size() != objectIndices.size() )
18980 {
18981 throw std::logic_error( "vk::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
18982 }
18983#endif // VULKAN_HPP_NO_EXCEPTIONS
18984 Result result = static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
18985 return createResultValue( result, "vk::Device::registerObjectsNVX" );
18986 }
18987#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
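    // registerObjectsNVX requires one object index per table entry; the enhanced overload
    // enforces this up front, with an assert when VULKAN_HPP_NO_EXCEPTIONS is defined and a
    // std::logic_error otherwise, before forwarding both ranges to the C entry point.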
18988
18989 Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const
18990 {
18991 return static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
18992 }
18993
18994#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18995 ResultValueType<void>::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices ) const
18996 {
18997#ifdef VULKAN_HPP_NO_EXCEPTIONS
18998 assert( objectEntryTypes.size() == objectIndices.size() );
18999#else
19000 if ( objectEntryTypes.size() != objectIndices.size() )
19001 {
19002 throw std::logic_error( "vk::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
19003 }
19004#endif // VULKAN_HPP_NO_EXCEPTIONS
19005 Result result = static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
19006 return createResultValue( result, "vk::Device::unregisterObjectsNVX" );
19007 }
19008#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19009
19010#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
19011 explicit
19012#endif
19013 operator VkDevice() const
19014 {
19015 return m_device;
19016 }
19017
19018 explicit operator bool() const
19019 {
19020 return m_device != VK_NULL_HANDLE;
19021 }
19022
19023 bool operator!() const
19024 {
19025 return m_device == VK_NULL_HANDLE;
19026 }
19027
19028 private:
19029 VkDevice m_device;
19030 };
19031 static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
19032
19033 class PhysicalDevice
19034 {
19035 public:
19036 PhysicalDevice()
19037 : m_physicalDevice(VK_NULL_HANDLE)
19038 {}
19039
19040#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
19041 PhysicalDevice(VkPhysicalDevice physicalDevice)
19042 : m_physicalDevice(physicalDevice)
19043 {}
19044
19045 PhysicalDevice& operator=(VkPhysicalDevice physicalDevice)
19046 {
19047 m_physicalDevice = physicalDevice;
19048 return *this;
19049 }
19050#endif
19051
19052    bool operator==(PhysicalDevice const &rhs) const
19053 {
19054 return m_physicalDevice == rhs.m_physicalDevice;
19055 }
19056
19057 bool operator!=(PhysicalDevice const &rhs) const
19058 {
19059 return m_physicalDevice != rhs.m_physicalDevice;
19060 }
19061
19062 bool operator<(PhysicalDevice const &rhs) const
19063 {
19064 return m_physicalDevice < rhs.m_physicalDevice;
19065 }
19066
19067    void getProperties( PhysicalDeviceProperties* pProperties ) const
19068 {
19069 vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
19070 }
19071
19072#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19073 PhysicalDeviceProperties getProperties() const
19074 {
19075 PhysicalDeviceProperties properties;
19076 vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
19077 return properties;
19078 }
19079#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19080
19081 void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties ) const
19082 {
19083 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
19084 }
19085
19086#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19087 template <typename Allocator = std::allocator<QueueFamilyProperties>>
19088 std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties() const
19089 {
19090 std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
19091 uint32_t queueFamilyPropertyCount;
19092 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
19093 queueFamilyProperties.resize( queueFamilyPropertyCount );
19094 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
19095 return queueFamilyProperties;
19096 }
19097#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
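    // getQueueFamilyProperties cannot fail, so the enhanced overload performs the usual
    // count-then-fetch sequence without a retry loop and returns the vector directly. A minimal
    // sketch, assuming a valid vk::PhysicalDevice `physicalDevice`:
    //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();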
19098
19099 void getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties ) const
19100 {
19101 vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
19102 }
19103
19104#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19105 PhysicalDeviceMemoryProperties getMemoryProperties() const
19106 {
19107 PhysicalDeviceMemoryProperties memoryProperties;
19108 vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
19109 return memoryProperties;
19110 }
19111#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19112
19113 void getFeatures( PhysicalDeviceFeatures* pFeatures ) const
19114 {
19115 vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
19116 }
19117
19118#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19119 PhysicalDeviceFeatures getFeatures() const
19120 {
19121 PhysicalDeviceFeatures features;
19122 vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
19123 return features;
19124 }
19125#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19126
19127 void getFormatProperties( Format format, FormatProperties* pFormatProperties ) const
19128 {
19129 vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
19130 }
19131
19132#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19133 FormatProperties getFormatProperties( Format format ) const
19134 {
19135 FormatProperties formatProperties;
19136 vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
19137 return formatProperties;
19138 }
19139#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19140
19141 Result getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties ) const
19142 {
19143 return static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
19144 }
19145
19146#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19147 ResultValueType<ImageFormatProperties>::type getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags ) const
19148 {
19149 ImageFormatProperties imageFormatProperties;
19150 Result result = static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
19151 return createResultValue( result, imageFormatProperties, "vk::PhysicalDevice::getImageFormatProperties" );
19152 }
19153#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19154
19155 Result createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice ) const
19156 {
19157 return static_cast<Result>( vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
19158 }
19159
19160#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19161 ResultValueType<Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19162 {
19163 Device device;
19164 Result result = static_cast<Result>( vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDevice*>( &device ) ) );
19165 return createResultValue( result, device, "vk::PhysicalDevice::createDevice" );
19166 }
19167#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
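    // createDevice is the usual entry point into the Device wrapper above. A minimal sketch,
    // assuming a valid `physicalDevice`, a chosen `queueFamilyIndex`, and the generated struct
    // constructors (flags first, then the remaining members in declaration order):
    //   float priority = 1.0f;
    //   vk::DeviceQueueCreateInfo queueInfo( vk::DeviceQueueCreateFlags(), queueFamilyIndex, 1, &priority );
    //   vk::DeviceCreateInfo createInfo( vk::DeviceCreateFlags(), 1, &queueInfo );
    //   vk::Device device = physicalDevice.createDevice( createInfo );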
19168
19169 Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties ) const
19170 {
19171 return static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
19172 }
19173
19174#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19175 template <typename Allocator = std::allocator<LayerProperties>>
19176 typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties() const
19177 {
19178 std::vector<LayerProperties,Allocator> properties;
19179 uint32_t propertyCount;
19180 Result result;
19181 do
19182 {
19183 result = static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
19184 if ( ( result == Result::eSuccess ) && propertyCount )
19185 {
19186 properties.resize( propertyCount );
19187 result = static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
19188 }
19189 } while ( result == Result::eIncomplete );
19190 assert( propertyCount <= properties.size() );
19191 properties.resize( propertyCount );
19192 return createResultValue( result, properties, "vk::PhysicalDevice::enumerateDeviceLayerProperties" );
19193 }
19194#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19195
19196 Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties ) const
19197 {
19198 return static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
19199 }
19200
19201#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19202 template <typename Allocator = std::allocator<ExtensionProperties>>
19203 typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr ) const
19204 {
19205 std::vector<ExtensionProperties,Allocator> properties;
19206 uint32_t propertyCount;
19207 Result result;
19208 do
19209 {
19210 result = static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
19211 if ( ( result == Result::eSuccess ) && propertyCount )
19212 {
19213 properties.resize( propertyCount );
19214 result = static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
19215 }
19216 } while ( result == Result::eIncomplete );
19217 assert( propertyCount <= properties.size() );
19218 properties.resize( propertyCount );
19219 return createResultValue( result, properties, "vk::PhysicalDevice::enumerateDeviceExtensionProperties" );
19220 }
19221#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
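    // enumerateDeviceExtensionProperties takes an Optional<const std::string> layer name; the
    // default nullptr asks for the extensions exposed by the implementation itself rather than
    // by a particular layer, and the eIncomplete loop above re-queries until the list is stable.
    // A minimal sketch, assuming a valid `physicalDevice`:
    //   std::vector<vk::ExtensionProperties> extensions = physicalDevice.enumerateDeviceExtensionProperties();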
19222
19223 void getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties ) const
19224 {
19225 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
19226 }
19227
19228#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19229 template <typename Allocator = std::allocator<SparseImageFormatProperties>>
19230 std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling ) const
19231 {
19232 std::vector<SparseImageFormatProperties,Allocator> properties;
19233 uint32_t propertyCount;
19234 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
19235 properties.resize( propertyCount );
19236 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
19237 return properties;
19238 }
19239#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19240
19241 Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties ) const
19242 {
19243 return static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
19244 }
19245
19246#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19247 template <typename Allocator = std::allocator<DisplayPropertiesKHR>>
19248 typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR() const
19249 {
19250 std::vector<DisplayPropertiesKHR,Allocator> properties;
19251 uint32_t propertyCount;
19252 Result result;
19253 do
19254 {
19255 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
19256 if ( ( result == Result::eSuccess ) && propertyCount )
19257 {
19258 properties.resize( propertyCount );
19259 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
19260 }
19261 } while ( result == Result::eIncomplete );
19262 assert( propertyCount <= properties.size() );
19263 properties.resize( propertyCount );
19264 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayPropertiesKHR" );
19265 }
19266#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19267
19268 Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties ) const
19269 {
19270 return static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
19271 }
19272
19273#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19274 template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>>
19275 typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR() const
19276 {
19277 std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
19278 uint32_t propertyCount;
19279 Result result;
19280 do
19281 {
19282 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
19283 if ( ( result == Result::eSuccess ) && propertyCount )
19284 {
19285 properties.resize( propertyCount );
19286 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
19287 }
19288 } while ( result == Result::eIncomplete );
19289 assert( propertyCount <= properties.size() );
19290 properties.resize( propertyCount );
19291 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayPlanePropertiesKHR" );
19292 }
19293#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19294
19295 Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays ) const
19296 {
19297 return static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
19298 }
19299
19300#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19301 template <typename Allocator = std::allocator<DisplayKHR>>
19302 typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
19303 {
19304 std::vector<DisplayKHR,Allocator> displays;
19305 uint32_t displayCount;
19306 Result result;
19307 do
19308 {
19309 result = static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
19310 if ( ( result == Result::eSuccess ) && displayCount )
19311 {
19312 displays.resize( displayCount );
19313 result = static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
19314 }
19315 } while ( result == Result::eIncomplete );
19316 assert( displayCount <= displays.size() );
19317 displays.resize( displayCount );
19318 return createResultValue( result, displays, "vk::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
19319 }
19320#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19321
19322 Result getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties ) const
19323 {
19324 return static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
19325 }
19326
19327#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19328 template <typename Allocator = std::allocator<DisplayModePropertiesKHR>>
19329 typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( DisplayKHR display ) const
19330 {
19331 std::vector<DisplayModePropertiesKHR,Allocator> properties;
19332 uint32_t propertyCount;
19333 Result result;
19334 do
19335 {
19336 result = static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
19337 if ( ( result == Result::eSuccess ) && propertyCount )
19338 {
19339 properties.resize( propertyCount );
19340 result = static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
19341 }
19342 } while ( result == Result::eIncomplete );
19343 assert( propertyCount <= properties.size() );
19344 properties.resize( propertyCount );
19345 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayModePropertiesKHR" );
19346 }
19347#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19348
19349 Result createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode ) const
19350 {
19351 return static_cast<Result>( vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
19352 }
19353
19354#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19355 ResultValueType<DisplayModeKHR>::type createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19356 {
19357 DisplayModeKHR mode;
19358 Result result = static_cast<Result>( vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
19359 return createResultValue( result, mode, "vk::PhysicalDevice::createDisplayModeKHR" );
19360 }
19361#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19362
19363 Result getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities ) const
19364 {
19365 return static_cast<Result>( vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
19366 }
19367
19368#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19369 ResultValueType<DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex ) const
19370 {
19371 DisplayPlaneCapabilitiesKHR capabilities;
19372 Result result = static_cast<Result>( vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
19373 return createResultValue( result, capabilities, "vk::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
19374 }
19375#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19376
19377#ifdef VK_USE_PLATFORM_MIR_KHR
19378 Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection ) const
19379 {
19380 return vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection );
19381 }
19382#endif /*VK_USE_PLATFORM_MIR_KHR*/
19383
19384#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19385#ifdef VK_USE_PLATFORM_MIR_KHR
19386 Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection ) const
19387 {
19388 return vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection );
19389 }
19390#endif /*VK_USE_PLATFORM_MIR_KHR*/
19391#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19392
19393 Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported ) const
19394 {
19395 return static_cast<Result>( vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), pSupported ) );
19396 }
19397
19398#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19399 ResultValueType<Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface ) const
19400 {
19401 Bool32 supported;
19402 Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), &supported ) );
19403 return createResultValue( result, supported, "vk::PhysicalDevice::getSurfaceSupportKHR" );
19404 }
19405#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19406
19407 Result getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities ) const
19408 {
19409 return static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
19410 }
19411
19412#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19413 ResultValueType<SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( SurfaceKHR surface ) const
19414 {
19415 SurfaceCapabilitiesKHR surfaceCapabilities;
19416 Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
19417 return createResultValue( result, surfaceCapabilities, "vk::PhysicalDevice::getSurfaceCapabilitiesKHR" );
19418 }
19419#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
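    // Illustrative sketch: the enhanced overload returns the capabilities by value, so a typical
    // swapchain setup can clamp its image count like this (surface assumed valid):
    //
    //   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
    //   uint32_t imageCount = caps.minImageCount + 1;
    //   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )   // maxImageCount == 0 means "no upper limit"
    //   {
    //     imageCount = caps.maxImageCount;
    //   }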
19420
19421 Result getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats ) const
19422 {
19423 return static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
19424 }
19425
19426#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19427 template <typename Allocator = std::allocator<SurfaceFormatKHR>>
19428 typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( SurfaceKHR surface ) const
19429 {
19430 std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
19431 uint32_t surfaceFormatCount;
19432 Result result;
19433 do
19434 {
19435 result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
19436 if ( ( result == Result::eSuccess ) && surfaceFormatCount )
19437 {
19438 surfaceFormats.resize( surfaceFormatCount );
19439 result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
19440 }
19441 } while ( result == Result::eIncomplete );
19442 assert( surfaceFormatCount <= surfaceFormats.size() );
19443 surfaceFormats.resize( surfaceFormatCount );
19444 return createResultValue( result, surfaceFormats, "vk::PhysicalDevice::getSurfaceFormatsKHR" );
19445 }
19446#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
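    // Illustrative sketch: a common pattern is to prefer a specific format (here B8G8R8A8 UNORM,
    // purely as an example) and otherwise fall back to the first entry reported for the surface:
    //
    //   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
    //   vk::SurfaceFormatKHR chosenFormat = formats.front();
    //   for ( auto const & f : formats )
    //   {
    //     if ( f.format == vk::Format::eB8G8R8A8Unorm )
    //     {
    //       chosenFormat = f;
    //       break;
    //     }
    //   }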
19447
19448 Result getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes ) const
19449 {
19450 return static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
19451 }
19452
19453#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19454 template <typename Allocator = std::allocator<PresentModeKHR>>
19455 typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( SurfaceKHR surface ) const
19456 {
19457 std::vector<PresentModeKHR,Allocator> presentModes;
19458 uint32_t presentModeCount;
19459 Result result;
19460 do
19461 {
19462 result = static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
19463 if ( ( result == Result::eSuccess ) && presentModeCount )
19464 {
19465 presentModes.resize( presentModeCount );
19466 result = static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
19467 }
19468 } while ( result == Result::eIncomplete );
19469 assert( presentModeCount <= presentModes.size() );
19470 presentModes.resize( presentModeCount );
19471 return createResultValue( result, presentModes, "vk::PhysicalDevice::getSurfacePresentModesKHR" );
19472 }
19473#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
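    // Illustrative sketch: FIFO is the only present mode the specification requires, so a reasonable
    // strategy is to use it as the fallback and upgrade to mailbox when it is offered:
    //
    //   std::vector<vk::PresentModeKHR> modes = physicalDevice.getSurfacePresentModesKHR( surface );
    //   vk::PresentModeKHR presentMode = vk::PresentModeKHR::eFifo;
    //   if ( std::find( modes.begin(), modes.end(), vk::PresentModeKHR::eMailbox ) != modes.end() )
    //   {
    //     presentMode = vk::PresentModeKHR::eMailbox;
    //   }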
19474
19475#ifdef VK_USE_PLATFORM_WAYLAND_KHR
19476 Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display ) const
19477 {
19478 return vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display );
19479 }
19480#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
19481
19482#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19483#ifdef VK_USE_PLATFORM_WAYLAND_KHR
19484 Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const
19485 {
19486 return vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
19487 }
19488#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
19489#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19490
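    // Note: unlike the other platform-specific presentation-support queries in this class, the plain
    // overload below has exactly the same signature as the enhanced overload that follows, so only
    // one of the two is compiled, selected by VULKAN_HPP_DISABLE_ENHANCED_MODE.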
19491#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19492#ifdef VK_USE_PLATFORM_WIN32_KHR
19493 Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const
19494 {
19495 return vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
19496 }
19497#endif /*VK_USE_PLATFORM_WIN32_KHR*/
19498#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19499
19500#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19501#ifdef VK_USE_PLATFORM_WIN32_KHR
19502 Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const
19503 {
19504 return vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
19505 }
19506#endif /*VK_USE_PLATFORM_WIN32_KHR*/
19507#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19508
19509#ifdef VK_USE_PLATFORM_XLIB_KHR
19510 Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const
19511 {
19512 return vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID );
19513 }
19514#endif /*VK_USE_PLATFORM_XLIB_KHR*/
19515
19516#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19517#ifdef VK_USE_PLATFORM_XLIB_KHR
19518 Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const
19519 {
19520 return vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
19521 }
19522#endif /*VK_USE_PLATFORM_XLIB_KHR*/
19523#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19524
19525#ifdef VK_USE_PLATFORM_XCB_KHR
19526 Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const
19527 {
19528 return vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id );
19529 }
19530#endif /*VK_USE_PLATFORM_XCB_KHR*/
19531
19532#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19533#ifdef VK_USE_PLATFORM_XCB_KHR
19534 Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const
19535 {
19536 return vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
19537 }
19538#endif /*VK_USE_PLATFORM_XCB_KHR*/
19539#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19540
19541    Result getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const
19542 {
19543 return static_cast<Result>( vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
19544 }
19545
19546#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19547 ResultValueType<ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
19548 {
19549 ExternalImageFormatPropertiesNV externalImageFormatProperties;
19550 Result result = static_cast<Result>( vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
19551 return createResultValue( result, externalImageFormatProperties, "vk::PhysicalDevice::getExternalImageFormatPropertiesNV" );
19552 }
19553#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19554
19555    void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits ) const
19556 {
19557 vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
19558 }
19559
19560#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19561 void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, DeviceGeneratedCommandsLimitsNVX & limits ) const
19562 {
19563 vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
19564 }
19565#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19566
19567#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
19568 explicit
19569#endif
19570 operator VkPhysicalDevice() const
19571 {
19572 return m_physicalDevice;
19573 }
19574
19575 explicit operator bool() const
19576 {
19577 return m_physicalDevice != VK_NULL_HANDLE;
19578 }
19579
19580 bool operator!() const
19581 {
19582 return m_physicalDevice == VK_NULL_HANDLE;
19583 }
19584
19585 private:
19586 VkPhysicalDevice m_physicalDevice;
19587 };
19588 static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
19589
19590 class Instance
19591 {
19592 public:
19593 Instance()
19594 : m_instance(VK_NULL_HANDLE)
19595 {}
19596
19597#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
19598 Instance(VkInstance instance)
19599 : m_instance(instance)
19600 {}
19601
19602 Instance& operator=(VkInstance instance)
19603 {
19604 m_instance = instance;
19605 return *this;
19606 }
19607#endif
19608
19609    bool operator==(Instance const &rhs) const
19610 {
19611 return m_instance == rhs.m_instance;
19612 }
19613
19614 bool operator!=(Instance const &rhs) const
19615 {
19616 return m_instance != rhs.m_instance;
19617 }
19618
19619 bool operator<(Instance const &rhs) const
19620 {
19621 return m_instance < rhs.m_instance;
19622 }
19623
19624    void destroy( const AllocationCallbacks* pAllocator ) const
19625 {
19626 vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19627 }
19628
19629#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19630 void destroy( Optional<const AllocationCallbacks> allocator = nullptr ) const
19631 {
19632 vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19633 }
19634#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19635
19636 Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices ) const
19637 {
19638 return static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
19639 }
19640
19641#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19642 template <typename Allocator = std::allocator<PhysicalDevice>>
19643 typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices() const
19644 {
19645 std::vector<PhysicalDevice,Allocator> physicalDevices;
19646 uint32_t physicalDeviceCount;
19647 Result result;
19648 do
19649 {
19650 result = static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
19651 if ( ( result == Result::eSuccess ) && physicalDeviceCount )
19652 {
19653 physicalDevices.resize( physicalDeviceCount );
19654 result = static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
19655 }
19656 } while ( result == Result::eIncomplete );
19657 assert( physicalDeviceCount <= physicalDevices.size() );
19658 physicalDevices.resize( physicalDeviceCount );
19659 return createResultValue( result, physicalDevices, "vk::Instance::enumeratePhysicalDevices" );
19660 }
19661#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
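    // Illustrative sketch: the enhanced overload hides the count/fill loop above, so device selection
    // typically reduces to (instance assumed valid; selection criteria are application-specific):
    //
    //   std::vector<vk::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();
    //   if ( physicalDevices.empty() )
    //   {
    //     // no Vulkan-capable device available
    //   }
    //   else
    //   {
    //     vk::PhysicalDevice gpu = physicalDevices.front();
    //   }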
19662
19663 PFN_vkVoidFunction getProcAddr( const char* pName ) const
19664 {
19665 return vkGetInstanceProcAddr( m_instance, pName );
19666 }
19667
19668#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19669 PFN_vkVoidFunction getProcAddr( const std::string & name ) const
19670 {
19671 return vkGetInstanceProcAddr( m_instance, name.c_str() );
19672 }
19673#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
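    // Illustrative sketch: commands introduced by extensions are usually resolved through
    // getProcAddr and cast to the matching PFN_ type from vulkan.h; the function name below is just
    // an example, and the result is nullptr if the command is unavailable:
    //
    //   auto pfnCreateDebugReportCallbackEXT = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
    //     instance.getProcAddr( "vkCreateDebugReportCallbackEXT" ) );
    //   if ( !pfnCreateDebugReportCallbackEXT )
    //   {
    //     // VK_EXT_debug_report not enabled / not supported
    //   }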
19674
19675#ifdef VK_USE_PLATFORM_ANDROID_KHR
19676 Result createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
19677 {
19678 return static_cast<Result>( vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
19679 }
19680#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
19681
19682#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19683#ifdef VK_USE_PLATFORM_ANDROID_KHR
19684 ResultValueType<SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19685 {
19686 SurfaceKHR surface;
19687 Result result = static_cast<Result>( vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
19688 return createResultValue( result, surface, "vk::Instance::createAndroidSurfaceKHR" );
19689 }
19690#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
19691#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19692
19693 Result createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
19694 {
19695 return static_cast<Result>( vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
19696 }
19697
19698#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19699 ResultValueType<SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19700 {
19701 SurfaceKHR surface;
19702 Result result = static_cast<Result>( vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
19703 return createResultValue( result, surface, "vk::Instance::createDisplayPlaneSurfaceKHR" );
19704 }
19705#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19706
19707#ifdef VK_USE_PLATFORM_MIR_KHR
19708 Result createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
19709 {
19710 return static_cast<Result>( vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
19711 }
19712#endif /*VK_USE_PLATFORM_MIR_KHR*/
19713
19714#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19715#ifdef VK_USE_PLATFORM_MIR_KHR
19716 ResultValueType<SurfaceKHR>::type createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19717 {
19718 SurfaceKHR surface;
19719 Result result = static_cast<Result>( vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
19720 return createResultValue( result, surface, "vk::Instance::createMirSurfaceKHR" );
19721 }
19722#endif /*VK_USE_PLATFORM_MIR_KHR*/
19723#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19724
19725 void destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator ) const
19726 {
19727 vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19728 }
19729
19730#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19731 void destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr ) const
19732 {
19733 vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19734 }
19735#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19736
19737#ifdef VK_USE_PLATFORM_WAYLAND_KHR
19738 Result createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
19739 {
19740 return static_cast<Result>( vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
19741 }
19742#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
19743
19744#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19745#ifdef VK_USE_PLATFORM_WAYLAND_KHR
19746 ResultValueType<SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19747 {
19748 SurfaceKHR surface;
19749 Result result = static_cast<Result>( vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
19750 return createResultValue( result, surface, "vk::Instance::createWaylandSurfaceKHR" );
19751 }
19752#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
19753#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19754
19755#ifdef VK_USE_PLATFORM_WIN32_KHR
19756 Result createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
19757 {
19758 return static_cast<Result>( vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
19759 }
19760#endif /*VK_USE_PLATFORM_WIN32_KHR*/
19761
19762#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19763#ifdef VK_USE_PLATFORM_WIN32_KHR
19764 ResultValueType<SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19765 {
19766 SurfaceKHR surface;
19767 Result result = static_cast<Result>( vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
19768 return createResultValue( result, surface, "vk::Instance::createWin32SurfaceKHR" );
19769 }
19770#endif /*VK_USE_PLATFORM_WIN32_KHR*/
19771#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19772
19773#ifdef VK_USE_PLATFORM_XLIB_KHR
19774 Result createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
19775 {
19776 return static_cast<Result>( vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
19777 }
19778#endif /*VK_USE_PLATFORM_XLIB_KHR*/
19779
19780#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19781#ifdef VK_USE_PLATFORM_XLIB_KHR
19782 ResultValueType<SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19783 {
19784 SurfaceKHR surface;
19785 Result result = static_cast<Result>( vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
19786 return createResultValue( result, surface, "vk::Instance::createXlibSurfaceKHR" );
19787 }
19788#endif /*VK_USE_PLATFORM_XLIB_KHR*/
19789#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19790
19791#ifdef VK_USE_PLATFORM_XCB_KHR
19792 Result createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
19793 {
19794 return static_cast<Result>( vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
19795 }
19796#endif /*VK_USE_PLATFORM_XCB_KHR*/
19797
19798#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19799#ifdef VK_USE_PLATFORM_XCB_KHR
19800 ResultValueType<SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19801 {
19802 SurfaceKHR surface;
19803 Result result = static_cast<Result>( vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
19804 return createResultValue( result, surface, "vk::Instance::createXcbSurfaceKHR" );
19805 }
19806#endif /*VK_USE_PLATFORM_XCB_KHR*/
19807#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19808
19809 Result createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback ) const
19810 {
19811 return static_cast<Result>( vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
19812 }
19813
19814#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19815 ResultValueType<DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19816 {
19817 DebugReportCallbackEXT callback;
19818 Result result = static_cast<Result>( vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
19819 return createResultValue( result, callback, "vk::Instance::createDebugReportCallbackEXT" );
19820 }
19821#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19822
19823 void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator ) const
19824 {
19825 vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19826 }
19827
19828#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19829 void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr ) const
19830 {
19831 vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19832 }
19833#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19834
19835 void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const
19836 {
19837 vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
19838 }
19839
19840#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19841 void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const
19842 {
19851      vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
19852 }
19853#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19854
19855#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
19856 explicit
19857#endif
19858 operator VkInstance() const
19859 {
19860 return m_instance;
19861 }
19862
19863 explicit operator bool() const
19864 {
19865 return m_instance != VK_NULL_HANDLE;
19866 }
19867
19868 bool operator!() const
19869 {
19870 return m_instance == VK_NULL_HANDLE;
19871 }
19872
19873 private:
19874 VkInstance m_instance;
19875 };
19876 static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
19877
19878  struct CmdProcessCommandsInfoNVX
19879  {
19880    CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t indirectCommandsTokenCount_ = 0, const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, uint32_t maxSequencesCount_ = 0, CommandBuffer targetCommandBuffer_ = CommandBuffer(), Buffer sequencesCountBuffer_ = Buffer(), DeviceSize sequencesCountOffset_ = 0, Buffer sequencesIndexBuffer_ = Buffer(), DeviceSize sequencesIndexOffset_ = 0 )
19881 : sType( StructureType::eCmdProcessCommandsInfoNVX )
19882      , pNext( nullptr )
19883      , objectTable( objectTable_ )
19884 , indirectCommandsLayout( indirectCommandsLayout_ )
19885 , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
19886 , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
19887 , maxSequencesCount( maxSequencesCount_ )
19888 , targetCommandBuffer( targetCommandBuffer_ )
19889 , sequencesCountBuffer( sequencesCountBuffer_ )
19890 , sequencesCountOffset( sequencesCountOffset_ )
19891 , sequencesIndexBuffer( sequencesIndexBuffer_ )
19892 , sequencesIndexOffset( sequencesIndexOffset_ )
19893    {
19894 }
19895
19896    CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs )
19897    {
19898      memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
19899    }
19900
19901    CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs )
19902    {
19903      memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
19904      return *this;
19905 }
19906
19907    CmdProcessCommandsInfoNVX& setSType( StructureType sType_ )
19908    {
19909 sType = sType_;
19910 return *this;
19911 }
19912
19913    CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ )
19914    {
19915 pNext = pNext_;
19916 return *this;
19917 }
19918
19919    CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
19920    {
19921      objectTable = objectTable_;
19922      return *this;
19923 }
19924
19925    CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
19926    {
19927      indirectCommandsLayout = indirectCommandsLayout_;
19928      return *this;
19929 }
19930
19931    CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ )
19932    {
19933      indirectCommandsTokenCount = indirectCommandsTokenCount_;
19934      return *this;
19935    }
19936
19937    CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ )
19938 {
19939 pIndirectCommandsTokens = pIndirectCommandsTokens_;
19940 return *this;
19941 }
19942
19943 CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
19944 {
19945 maxSequencesCount = maxSequencesCount_;
19946 return *this;
19947 }
19948
19949 CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ )
19950 {
19951 targetCommandBuffer = targetCommandBuffer_;
19952 return *this;
19953 }
19954
19955 CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ )
19956 {
19957 sequencesCountBuffer = sequencesCountBuffer_;
19958 return *this;
19959 }
19960
19961 CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ )
19962 {
19963 sequencesCountOffset = sequencesCountOffset_;
19964 return *this;
19965 }
19966
19967 CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ )
19968 {
19969 sequencesIndexBuffer = sequencesIndexBuffer_;
19970 return *this;
19971 }
19972
19973 CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ )
19974 {
19975 sequencesIndexOffset = sequencesIndexOffset_;
19976 return *this;
19977 }
19978
19979 operator const VkCmdProcessCommandsInfoNVX&() const
19980 {
19981 return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>(this);
19982 }
19983
19984 bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const
19985    {
19986 return ( sType == rhs.sType )
19987 && ( pNext == rhs.pNext )
19988          && ( objectTable == rhs.objectTable )
19989 && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
19990 && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
19991 && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
19992 && ( maxSequencesCount == rhs.maxSequencesCount )
19993 && ( targetCommandBuffer == rhs.targetCommandBuffer )
19994 && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
19995 && ( sequencesCountOffset == rhs.sequencesCountOffset )
19996 && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
19997 && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
19998    }
19999
20000    bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
20001    {
20002 return !operator==( rhs );
20003 }
20004
20005 private:
20006 StructureType sType;
20007
20008 public:
20009 const void* pNext;
20010    ObjectTableNVX objectTable;
20011 IndirectCommandsLayoutNVX indirectCommandsLayout;
20012 uint32_t indirectCommandsTokenCount;
20013 const IndirectCommandsTokenNVX* pIndirectCommandsTokens;
20014 uint32_t maxSequencesCount;
20015 CommandBuffer targetCommandBuffer;
20016 Buffer sequencesCountBuffer;
20017 DeviceSize sequencesCountOffset;
20018 Buffer sequencesIndexBuffer;
20019 DeviceSize sequencesIndexOffset;
20020  };
20021  static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
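  // Illustrative sketch: like the other generated structs, CmdProcessCommandsInfoNVX can be filled
  // through its chainable setters; objectTable, layout, tokens and maxSequences are assumed to have
  // been created elsewhere:
  //
  //   vk::CmdProcessCommandsInfoNVX processInfo = vk::CmdProcessCommandsInfoNVX()
  //     .setObjectTable( objectTable )
  //     .setIndirectCommandsLayout( layout )
  //     .setIndirectCommandsTokenCount( static_cast<uint32_t>( tokens.size() ) )
  //     .setPIndirectCommandsTokens( tokens.data() )
  //     .setMaxSequencesCount( maxSequences );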
20022
20023  VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance )
20024  {
20025 return static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
20026 }
20027
20028#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20029  VULKAN_HPP_INLINE ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr )
20030  {
20031 Instance instance;
20032 Result result = static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkInstance*>( &instance ) ) );
20033 return createResultValue( result, instance, "vk::createInstance" );
20034 }
20035#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
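  // Illustrative sketch of the enhanced entry point (names and versions are example values only;
  // failure throws unless VULKAN_HPP_NO_EXCEPTIONS is defined):
  //
  //   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_0 );
  //   vk::InstanceCreateInfo createInfo;
  //   createInfo.setPApplicationInfo( &appInfo );
  //   vk::Instance instance = vk::createInstance( createInfo );
  //   // ... use the instance ...
  //   instance.destroy();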
20036
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020037 VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020038 {
20039 return "(void)";
20040 }
20041
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020042 VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020043 {
20044 return "{}";
20045 }
20046
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020047 VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020048 {
20049 return "(void)";
20050 }
20051
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020052 VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020053 {
20054 return "{}";
20055 }
20056
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020057 VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020058 {
20059 return "(void)";
20060 }
20061
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020062 VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020063 {
20064 return "{}";
20065 }
20066
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020067 VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020068 {
20069 return "(void)";
20070 }
20071
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020072 VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020073 {
20074 return "{}";
20075 }
20076
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020077 VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020078 {
20079 return "(void)";
20080 }
20081
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020082 VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020083 {
20084 return "{}";
20085 }
20086
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020087 VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020088 {
20089 return "(void)";
20090 }
20091
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020092 VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020093 {
20094 return "{}";
20095 }
20096
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020097 VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020098 {
20099 return "(void)";
20100 }
20101
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020102 VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020103 {
20104 return "{}";
20105 }
20106
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020107 VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020108 {
20109 return "(void)";
20110 }
20111
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020112 VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020113 {
20114 return "{}";
20115 }
20116
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020117 VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020118 {
20119 return "(void)";
20120 }
20121
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020122 VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020123 {
20124 return "{}";
20125 }
20126
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020127 VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020128 {
20129 return "(void)";
20130 }
20131
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020132 VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020133 {
20134 return "{}";
20135 }
20136
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020137 VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020138 {
20139 return "(void)";
20140 }
20141
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020142 VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020143 {
20144 return "{}";
20145 }
20146
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020147 VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020148 {
20149 return "(void)";
20150 }
20151
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020152 VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020153 {
20154 return "{}";
20155 }
20156
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020157 VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020158 {
20159 return "(void)";
20160 }
20161
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020162 VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020163 {
20164 return "{}";
20165 }
20166
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020167 VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020168 {
20169 return "(void)";
20170 }
20171
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020172 VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020173 {
20174 return "{}";
20175 }
20176
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020177 VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020178 {
20179 return "(void)";
20180 }
20181
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020182 VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020183 {
20184 return "{}";
20185 }
20186
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020187 VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020188 {
20189 return "(void)";
20190 }
20191
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020192 VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020193 {
20194 return "{}";
20195 }
20196
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020197 VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020198 {
20199 return "(void)";
20200 }
20201
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020202 VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020203 {
20204 return "{}";
20205 }
20206
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020207 VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020208 {
20209 return "(void)";
20210 }
20211
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020212 VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020213 {
20214 return "{}";
20215 }
20216
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020217 VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020218 {
20219 return "(void)";
20220 }
20221
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020222 VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020223 {
20224 return "{}";
20225 }
20226
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020227 VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020228 {
20229 return "(void)";
20230 }
20231
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020232 VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020233 {
20234 return "{}";
20235 }
20236
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020237 VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020238 {
20239 return "(void)";
20240 }
20241
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020242 VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020243 {
20244 return "{}";
20245 }
20246
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020247 VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020248 {
20249 return "(void)";
20250 }
20251
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020252 VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020253 {
20254 return "{}";
20255 }
20256
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020257 VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020258 {
20259 return "(void)";
20260 }
20261
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020262 VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020263 {
20264 return "{}";
20265 }
20266
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020267 VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020268 {
20269 return "(void)";
20270 }
20271
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020272 VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020273 {
20274 return "{}";
20275 }
20276
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020277 VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020278 {
20279 return "(void)";
20280 }
20281
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020282 VULKAN_HPP_INLINE std::string to_string(EventCreateFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020283 {
20284 return "{}";
20285 }
20286
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020287 VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020288 {
20289 return "(void)";
20290 }
20291
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020292 VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020293 {
20294 return "{}";
20295 }
20296
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020297 VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020298 {
20299 return "(void)";
20300 }
20301
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020302 VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020303 {
20304 return "{}";
20305 }
20306
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020307 VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020308 {
20309 return "(void)";
20310 }
20311
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020312 VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020313 {
20314 return "{}";
20315 }
20316
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020317 VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020318 {
20319 return "(void)";
20320 }
20321
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020322 VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020323 {
20324 return "{}";
20325 }
20326
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020327 VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020328 {
20329 return "(void)";
20330 }
20331
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020332 VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020333 {
20334 return "{}";
20335 }
20336
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020337 VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020338 {
20339 return "(void)";
20340 }
20341
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020342 VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020343 {
20344 return "{}";
20345 }
20346
20347#ifdef VK_USE_PLATFORM_ANDROID_KHR
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020348 VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020349 {
20350 return "(void)";
20351 }
20352#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
20353
20354#ifdef VK_USE_PLATFORM_ANDROID_KHR
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020355 VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020356 {
20357 return "{}";
20358 }
20359#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
20360
20361#ifdef VK_USE_PLATFORM_MIR_KHR
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020362 VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagBitsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020363 {
20364 return "(void)";
20365 }
20366#endif /*VK_USE_PLATFORM_MIR_KHR*/
20367
20368#ifdef VK_USE_PLATFORM_MIR_KHR
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020369 VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020370 {
20371 return "{}";
20372 }
20373#endif /*VK_USE_PLATFORM_MIR_KHR*/
20374
20375#ifdef VK_USE_PLATFORM_WAYLAND_KHR
Mark Lobodzinski2d589822016-12-12 09:44:34 -070020376 VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060020377 {
20378 return "(void)";
20379 }
20380#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
20381
20382#ifdef VK_USE_PLATFORM_WAYLAND_KHR
20383  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR)
20384  {
20385 return "{}";
20386 }
20387#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
20388
20389#ifdef VK_USE_PLATFORM_WIN32_KHR
20390  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR)
20391  {
20392 return "(void)";
20393 }
20394#endif /*VK_USE_PLATFORM_WIN32_KHR*/
20395
20396#ifdef VK_USE_PLATFORM_WIN32_KHR
20397  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR)
20398  {
20399 return "{}";
20400 }
20401#endif /*VK_USE_PLATFORM_WIN32_KHR*/
20402
20403#ifdef VK_USE_PLATFORM_XLIB_KHR
20404  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR)
20405  {
20406 return "(void)";
20407 }
20408#endif /*VK_USE_PLATFORM_XLIB_KHR*/
20409
20410#ifdef VK_USE_PLATFORM_XLIB_KHR
20411  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR)
20412  {
20413 return "{}";
20414 }
20415#endif /*VK_USE_PLATFORM_XLIB_KHR*/
20416
20417#ifdef VK_USE_PLATFORM_XCB_KHR
20418  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR)
20419  {
20420 return "(void)";
20421 }
20422#endif /*VK_USE_PLATFORM_XCB_KHR*/
20423
20424#ifdef VK_USE_PLATFORM_XCB_KHR
20425  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR)
20426  {
20427 return "{}";
20428 }
20429#endif /*VK_USE_PLATFORM_XCB_KHR*/
20430
20431  VULKAN_HPP_INLINE std::string to_string(ImageLayout value)
20432  {
20433 switch (value)
20434 {
20435 case ImageLayout::eUndefined: return "Undefined";
20436 case ImageLayout::eGeneral: return "General";
20437 case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
20438 case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
20439 case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
20440 case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
20441 case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
20442 case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
20443 case ImageLayout::ePreinitialized: return "Preinitialized";
20444 case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
20445 default: return "invalid";
20446 }
20447 }
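  // Illustrative usage of the enum overloads (assuming the usual vk namespace wrapper):
  //   vk::to_string(vk::ImageLayout::eColorAttachmentOptimal); // "ColorAttachmentOptimal"
  //   vk::to_string(static_cast<vk::ImageLayout>(0x7FFFFFFF)); // "invalid"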
20448
20449  VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value)
20450  {
20451 switch (value)
20452 {
20453 case AttachmentLoadOp::eLoad: return "Load";
20454 case AttachmentLoadOp::eClear: return "Clear";
20455 case AttachmentLoadOp::eDontCare: return "DontCare";
20456 default: return "invalid";
20457 }
20458 }
20459
20460  VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value)
20461  {
20462 switch (value)
20463 {
20464 case AttachmentStoreOp::eStore: return "Store";
20465 case AttachmentStoreOp::eDontCare: return "DontCare";
20466 default: return "invalid";
20467 }
20468 }
20469
20470  VULKAN_HPP_INLINE std::string to_string(ImageType value)
20471  {
20472 switch (value)
20473 {
20474 case ImageType::e1D: return "1D";
20475 case ImageType::e2D: return "2D";
20476 case ImageType::e3D: return "3D";
20477 default: return "invalid";
20478 }
20479 }
20480
20481  VULKAN_HPP_INLINE std::string to_string(ImageTiling value)
20482  {
20483 switch (value)
20484 {
20485 case ImageTiling::eOptimal: return "Optimal";
20486 case ImageTiling::eLinear: return "Linear";
20487 default: return "invalid";
20488 }
20489 }
20490
20491  VULKAN_HPP_INLINE std::string to_string(ImageViewType value)
20492  {
20493 switch (value)
20494 {
20495 case ImageViewType::e1D: return "1D";
20496 case ImageViewType::e2D: return "2D";
20497 case ImageViewType::e3D: return "3D";
20498 case ImageViewType::eCube: return "Cube";
20499 case ImageViewType::e1DArray: return "1DArray";
20500 case ImageViewType::e2DArray: return "2DArray";
20501 case ImageViewType::eCubeArray: return "CubeArray";
20502 default: return "invalid";
20503 }
20504 }
20505
20506  VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value)
20507  {
20508 switch (value)
20509 {
20510 case CommandBufferLevel::ePrimary: return "Primary";
20511 case CommandBufferLevel::eSecondary: return "Secondary";
20512 default: return "invalid";
20513 }
20514 }
20515
20516  VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value)
20517  {
20518 switch (value)
20519 {
20520 case ComponentSwizzle::eIdentity: return "Identity";
20521 case ComponentSwizzle::eZero: return "Zero";
20522 case ComponentSwizzle::eOne: return "One";
20523 case ComponentSwizzle::eR: return "R";
20524 case ComponentSwizzle::eG: return "G";
20525 case ComponentSwizzle::eB: return "B";
20526 case ComponentSwizzle::eA: return "A";
20527 default: return "invalid";
20528 }
20529 }
20530
20531  VULKAN_HPP_INLINE std::string to_string(DescriptorType value)
20532  {
20533 switch (value)
20534 {
20535 case DescriptorType::eSampler: return "Sampler";
20536 case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
20537 case DescriptorType::eSampledImage: return "SampledImage";
20538 case DescriptorType::eStorageImage: return "StorageImage";
20539 case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
20540 case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
20541 case DescriptorType::eUniformBuffer: return "UniformBuffer";
20542 case DescriptorType::eStorageBuffer: return "StorageBuffer";
20543 case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
20544 case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
20545 case DescriptorType::eInputAttachment: return "InputAttachment";
20546 default: return "invalid";
20547 }
20548 }
20549
20550  VULKAN_HPP_INLINE std::string to_string(QueryType value)
20551  {
20552 switch (value)
20553 {
20554 case QueryType::eOcclusion: return "Occlusion";
20555 case QueryType::ePipelineStatistics: return "PipelineStatistics";
20556 case QueryType::eTimestamp: return "Timestamp";
20557 default: return "invalid";
20558 }
20559 }
20560
20561  VULKAN_HPP_INLINE std::string to_string(BorderColor value)
20562  {
20563 switch (value)
20564 {
20565 case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
20566 case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
20567 case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
20568 case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
20569 case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
20570 case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
20571 default: return "invalid";
20572 }
20573 }
20574
20575  VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value)
20576  {
20577 switch (value)
20578 {
20579 case PipelineBindPoint::eGraphics: return "Graphics";
20580 case PipelineBindPoint::eCompute: return "Compute";
20581 default: return "invalid";
20582 }
20583 }
20584
20585  VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value)
20586  {
20587 switch (value)
20588 {
20589 case PipelineCacheHeaderVersion::eOne: return "One";
20590 default: return "invalid";
20591 }
20592 }
20593
20594  VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value)
20595  {
20596 switch (value)
20597 {
20598 case PrimitiveTopology::ePointList: return "PointList";
20599 case PrimitiveTopology::eLineList: return "LineList";
20600 case PrimitiveTopology::eLineStrip: return "LineStrip";
20601 case PrimitiveTopology::eTriangleList: return "TriangleList";
20602 case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
20603 case PrimitiveTopology::eTriangleFan: return "TriangleFan";
20604 case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
20605 case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
20606 case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
20607 case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
20608 case PrimitiveTopology::ePatchList: return "PatchList";
20609 default: return "invalid";
20610 }
20611 }
20612
20613  VULKAN_HPP_INLINE std::string to_string(SharingMode value)
20614  {
20615 switch (value)
20616 {
20617 case SharingMode::eExclusive: return "Exclusive";
20618 case SharingMode::eConcurrent: return "Concurrent";
20619 default: return "invalid";
20620 }
20621 }
20622
20623  VULKAN_HPP_INLINE std::string to_string(IndexType value)
20624  {
20625 switch (value)
20626 {
20627 case IndexType::eUint16: return "Uint16";
20628 case IndexType::eUint32: return "Uint32";
20629 default: return "invalid";
20630 }
20631 }
20632
20633  VULKAN_HPP_INLINE std::string to_string(Filter value)
20634  {
20635 switch (value)
20636 {
20637 case Filter::eNearest: return "Nearest";
20638 case Filter::eLinear: return "Linear";
20639 case Filter::eCubicIMG: return "CubicIMG";
20640 default: return "invalid";
20641 }
20642 }
20643
20644  VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value)
20645  {
20646 switch (value)
20647 {
20648 case SamplerMipmapMode::eNearest: return "Nearest";
20649 case SamplerMipmapMode::eLinear: return "Linear";
20650 default: return "invalid";
20651 }
20652 }
20653
20654  VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value)
20655  {
20656 switch (value)
20657 {
20658 case SamplerAddressMode::eRepeat: return "Repeat";
20659 case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
20660 case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
20661 case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
20662 case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
20663 default: return "invalid";
20664 }
20665 }
20666
20667  VULKAN_HPP_INLINE std::string to_string(CompareOp value)
20668  {
20669 switch (value)
20670 {
20671 case CompareOp::eNever: return "Never";
20672 case CompareOp::eLess: return "Less";
20673 case CompareOp::eEqual: return "Equal";
20674 case CompareOp::eLessOrEqual: return "LessOrEqual";
20675 case CompareOp::eGreater: return "Greater";
20676 case CompareOp::eNotEqual: return "NotEqual";
20677 case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
20678 case CompareOp::eAlways: return "Always";
20679 default: return "invalid";
20680 }
20681 }
20682
20683  VULKAN_HPP_INLINE std::string to_string(PolygonMode value)
20684  {
20685 switch (value)
20686 {
20687 case PolygonMode::eFill: return "Fill";
20688 case PolygonMode::eLine: return "Line";
20689 case PolygonMode::ePoint: return "Point";
20690 default: return "invalid";
20691 }
20692 }
20693
20694  VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value)
20695  {
20696 switch (value)
20697 {
20698 case CullModeFlagBits::eNone: return "None";
20699 case CullModeFlagBits::eFront: return "Front";
20700 case CullModeFlagBits::eBack: return "Back";
20701 case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
20702 default: return "invalid";
20703 }
20704 }
20705
20706  VULKAN_HPP_INLINE std::string to_string(CullModeFlags value)
20707  {
20708 if (!value) return "{}";
20709 std::string result;
20710 if (value & CullModeFlagBits::eNone) result += "None | ";
20711 if (value & CullModeFlagBits::eFront) result += "Front | ";
20712 if (value & CullModeFlagBits::eBack) result += "Back | ";
20713 if (value & CullModeFlagBits::eFrontAndBack) result += "FrontAndBack | ";
20714 return "{" + result.substr(0, result.size() - 3) + "}";
20715 }
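  // The Flags overloads append "Name | " for every bit that tests non-zero and then trim
  // the trailing separator via substr; a zero mask returns "{}" up front, so eNone
  // (value 0) can never appear in the output. Illustrative, assuming the bitwise
  // operators this header defines for CullModeFlagBits:
  //   vk::to_string(vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack);
  //   // yields "{Front | Back | FrontAndBack}" since eFrontAndBack (0x3) also matches the combined mask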
20716
20717  VULKAN_HPP_INLINE std::string to_string(FrontFace value)
20718  {
20719 switch (value)
20720 {
20721 case FrontFace::eCounterClockwise: return "CounterClockwise";
20722 case FrontFace::eClockwise: return "Clockwise";
20723 default: return "invalid";
20724 }
20725 }
20726
20727  VULKAN_HPP_INLINE std::string to_string(BlendFactor value)
20728  {
20729 switch (value)
20730 {
20731 case BlendFactor::eZero: return "Zero";
20732 case BlendFactor::eOne: return "One";
20733 case BlendFactor::eSrcColor: return "SrcColor";
20734 case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
20735 case BlendFactor::eDstColor: return "DstColor";
20736 case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
20737 case BlendFactor::eSrcAlpha: return "SrcAlpha";
20738 case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
20739 case BlendFactor::eDstAlpha: return "DstAlpha";
20740 case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
20741 case BlendFactor::eConstantColor: return "ConstantColor";
20742 case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
20743 case BlendFactor::eConstantAlpha: return "ConstantAlpha";
20744 case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
20745 case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
20746 case BlendFactor::eSrc1Color: return "Src1Color";
20747 case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
20748 case BlendFactor::eSrc1Alpha: return "Src1Alpha";
20749 case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
20750 default: return "invalid";
20751 }
20752 }
20753
20754  VULKAN_HPP_INLINE std::string to_string(BlendOp value)
20755  {
20756 switch (value)
20757 {
20758 case BlendOp::eAdd: return "Add";
20759 case BlendOp::eSubtract: return "Subtract";
20760 case BlendOp::eReverseSubtract: return "ReverseSubtract";
20761 case BlendOp::eMin: return "Min";
20762 case BlendOp::eMax: return "Max";
20763 default: return "invalid";
20764 }
20765 }
20766
20767  VULKAN_HPP_INLINE std::string to_string(StencilOp value)
20768  {
20769 switch (value)
20770 {
20771 case StencilOp::eKeep: return "Keep";
20772 case StencilOp::eZero: return "Zero";
20773 case StencilOp::eReplace: return "Replace";
20774 case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
20775 case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
20776 case StencilOp::eInvert: return "Invert";
20777 case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
20778 case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
20779 default: return "invalid";
20780 }
20781 }
20782
20783  VULKAN_HPP_INLINE std::string to_string(LogicOp value)
20784  {
20785 switch (value)
20786 {
20787 case LogicOp::eClear: return "Clear";
20788 case LogicOp::eAnd: return "And";
20789 case LogicOp::eAndReverse: return "AndReverse";
20790 case LogicOp::eCopy: return "Copy";
20791 case LogicOp::eAndInverted: return "AndInverted";
20792 case LogicOp::eNoOp: return "NoOp";
20793 case LogicOp::eXor: return "Xor";
20794 case LogicOp::eOr: return "Or";
20795 case LogicOp::eNor: return "Nor";
20796 case LogicOp::eEquivalent: return "Equivalent";
20797 case LogicOp::eInvert: return "Invert";
20798 case LogicOp::eOrReverse: return "OrReverse";
20799 case LogicOp::eCopyInverted: return "CopyInverted";
20800 case LogicOp::eOrInverted: return "OrInverted";
20801 case LogicOp::eNand: return "Nand";
20802 case LogicOp::eSet: return "Set";
20803 default: return "invalid";
20804 }
20805 }
20806
20807  VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value)
20808  {
20809 switch (value)
20810 {
20811 case InternalAllocationType::eExecutable: return "Executable";
20812 default: return "invalid";
20813 }
20814 }
20815
20816  VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value)
20817  {
20818 switch (value)
20819 {
20820 case SystemAllocationScope::eCommand: return "Command";
20821 case SystemAllocationScope::eObject: return "Object";
20822 case SystemAllocationScope::eCache: return "Cache";
20823 case SystemAllocationScope::eDevice: return "Device";
20824 case SystemAllocationScope::eInstance: return "Instance";
20825 default: return "invalid";
20826 }
20827 }
20828
20829  VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value)
20830  {
20831 switch (value)
20832 {
20833 case PhysicalDeviceType::eOther: return "Other";
20834 case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
20835 case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
20836 case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
20837 case PhysicalDeviceType::eCpu: return "Cpu";
20838 default: return "invalid";
20839 }
20840 }
20841
20842  VULKAN_HPP_INLINE std::string to_string(VertexInputRate value)
20843  {
20844 switch (value)
20845 {
20846 case VertexInputRate::eVertex: return "Vertex";
20847 case VertexInputRate::eInstance: return "Instance";
20848 default: return "invalid";
20849 }
20850 }
20851
20852  VULKAN_HPP_INLINE std::string to_string(Format value)
20853  {
20854 switch (value)
20855 {
20856 case Format::eUndefined: return "Undefined";
20857 case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
20858 case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
20859 case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
20860 case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
20861 case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
20862 case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
20863 case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
20864 case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
20865 case Format::eR8Unorm: return "R8Unorm";
20866 case Format::eR8Snorm: return "R8Snorm";
20867 case Format::eR8Uscaled: return "R8Uscaled";
20868 case Format::eR8Sscaled: return "R8Sscaled";
20869 case Format::eR8Uint: return "R8Uint";
20870 case Format::eR8Sint: return "R8Sint";
20871 case Format::eR8Srgb: return "R8Srgb";
20872 case Format::eR8G8Unorm: return "R8G8Unorm";
20873 case Format::eR8G8Snorm: return "R8G8Snorm";
20874 case Format::eR8G8Uscaled: return "R8G8Uscaled";
20875 case Format::eR8G8Sscaled: return "R8G8Sscaled";
20876 case Format::eR8G8Uint: return "R8G8Uint";
20877 case Format::eR8G8Sint: return "R8G8Sint";
20878 case Format::eR8G8Srgb: return "R8G8Srgb";
20879 case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
20880 case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
20881 case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
20882 case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
20883 case Format::eR8G8B8Uint: return "R8G8B8Uint";
20884 case Format::eR8G8B8Sint: return "R8G8B8Sint";
20885 case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
20886 case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
20887 case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
20888 case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
20889 case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
20890 case Format::eB8G8R8Uint: return "B8G8R8Uint";
20891 case Format::eB8G8R8Sint: return "B8G8R8Sint";
20892 case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
20893 case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
20894 case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
20895 case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
20896 case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
20897 case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
20898 case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
20899 case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
20900 case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
20901 case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
20902 case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
20903 case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
20904 case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
20905 case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
20906 case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
20907 case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
20908 case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
20909 case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
20910 case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
20911 case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
20912 case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
20913 case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
20914 case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
20915 case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
20916 case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
20917 case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
20918 case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
20919 case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
20920 case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
20921 case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
20922 case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
20923 case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
20924 case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
20925 case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
20926 case Format::eR16Unorm: return "R16Unorm";
20927 case Format::eR16Snorm: return "R16Snorm";
20928 case Format::eR16Uscaled: return "R16Uscaled";
20929 case Format::eR16Sscaled: return "R16Sscaled";
20930 case Format::eR16Uint: return "R16Uint";
20931 case Format::eR16Sint: return "R16Sint";
20932 case Format::eR16Sfloat: return "R16Sfloat";
20933 case Format::eR16G16Unorm: return "R16G16Unorm";
20934 case Format::eR16G16Snorm: return "R16G16Snorm";
20935 case Format::eR16G16Uscaled: return "R16G16Uscaled";
20936 case Format::eR16G16Sscaled: return "R16G16Sscaled";
20937 case Format::eR16G16Uint: return "R16G16Uint";
20938 case Format::eR16G16Sint: return "R16G16Sint";
20939 case Format::eR16G16Sfloat: return "R16G16Sfloat";
20940 case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
20941 case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
20942 case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
20943 case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
20944 case Format::eR16G16B16Uint: return "R16G16B16Uint";
20945 case Format::eR16G16B16Sint: return "R16G16B16Sint";
20946 case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
20947 case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
20948 case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
20949 case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
20950 case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
20951 case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
20952 case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
20953 case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
20954 case Format::eR32Uint: return "R32Uint";
20955 case Format::eR32Sint: return "R32Sint";
20956 case Format::eR32Sfloat: return "R32Sfloat";
20957 case Format::eR32G32Uint: return "R32G32Uint";
20958 case Format::eR32G32Sint: return "R32G32Sint";
20959 case Format::eR32G32Sfloat: return "R32G32Sfloat";
20960 case Format::eR32G32B32Uint: return "R32G32B32Uint";
20961 case Format::eR32G32B32Sint: return "R32G32B32Sint";
20962 case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
20963 case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
20964 case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
20965 case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
20966 case Format::eR64Uint: return "R64Uint";
20967 case Format::eR64Sint: return "R64Sint";
20968 case Format::eR64Sfloat: return "R64Sfloat";
20969 case Format::eR64G64Uint: return "R64G64Uint";
20970 case Format::eR64G64Sint: return "R64G64Sint";
20971 case Format::eR64G64Sfloat: return "R64G64Sfloat";
20972 case Format::eR64G64B64Uint: return "R64G64B64Uint";
20973 case Format::eR64G64B64Sint: return "R64G64B64Sint";
20974 case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
20975 case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
20976 case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
20977 case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
20978 case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
20979 case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
20980 case Format::eD16Unorm: return "D16Unorm";
20981 case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
20982 case Format::eD32Sfloat: return "D32Sfloat";
20983 case Format::eS8Uint: return "S8Uint";
20984 case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
20985 case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
20986 case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
20987 case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
20988 case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
20989 case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
20990 case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
20991 case Format::eBc2UnormBlock: return "Bc2UnormBlock";
20992 case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
20993 case Format::eBc3UnormBlock: return "Bc3UnormBlock";
20994 case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
20995 case Format::eBc4UnormBlock: return "Bc4UnormBlock";
20996 case Format::eBc4SnormBlock: return "Bc4SnormBlock";
20997 case Format::eBc5UnormBlock: return "Bc5UnormBlock";
20998 case Format::eBc5SnormBlock: return "Bc5SnormBlock";
20999 case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
21000 case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
21001 case Format::eBc7UnormBlock: return "Bc7UnormBlock";
21002 case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
21003 case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
21004 case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
21005 case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
21006 case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
21007 case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
21008 case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
21009 case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
21010 case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
21011 case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
21012 case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
21013 case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
21014 case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
21015 case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
21016 case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
21017 case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
21018 case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
21019 case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
21020 case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
21021 case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
21022 case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
21023 case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
21024 case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
21025 case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
21026 case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
21027 case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
21028 case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
21029 case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
21030 case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
21031 case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
21032 case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
21033 case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
21034 case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
21035 case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
21036 case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
21037 case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
21038 case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
21039 case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
21040 case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
21041      case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
21042 case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
21043 case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
21044 case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
21045 case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
21046 case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
21047 case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
21048 case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
21049      default: return "invalid";
21050 }
21051 }
21052
21053  VULKAN_HPP_INLINE std::string to_string(StructureType value)
21054  {
21055 switch (value)
21056 {
21057 case StructureType::eApplicationInfo: return "ApplicationInfo";
21058 case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
21059 case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
21060 case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
21061 case StructureType::eSubmitInfo: return "SubmitInfo";
21062 case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
21063 case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
21064 case StructureType::eBindSparseInfo: return "BindSparseInfo";
21065 case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
21066 case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
21067 case StructureType::eEventCreateInfo: return "EventCreateInfo";
21068 case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
21069 case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
21070 case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
21071 case StructureType::eImageCreateInfo: return "ImageCreateInfo";
21072 case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
21073 case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
21074 case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
21075 case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
21076 case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
21077 case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
21078 case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
21079 case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
21080 case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
21081 case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
21082 case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
21083 case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
21084 case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
21085 case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
21086 case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
21087 case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
21088 case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
21089 case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
21090 case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
21091 case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
21092 case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
21093 case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
21094 case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
21095 case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
21096 case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
21097 case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
21098 case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
21099 case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
21100 case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
21101 case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
21102 case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
21103 case StructureType::eMemoryBarrier: return "MemoryBarrier";
21104 case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
21105 case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
21106 case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
21107 case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
21108 case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
21109 case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
21110 case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
21111 case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
21112 case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
21113 case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
21114 case StructureType::eMirSurfaceCreateInfoKHR: return "MirSurfaceCreateInfoKHR";
21115 case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
21116 case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
21117 case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
21118 case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD";
21119 case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
21120 case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
21121 case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
21122 case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
21123 case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
21124 case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
21125      case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
21126 case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
21127 case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
21128 case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
21129 case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
21130      case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
21131      case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX";
21132 case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX";
21133 case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX";
21134 case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX";
21135 case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX";
21136 case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX";
21137      default: return "invalid";
21138 }
21139 }
21140
21141  VULKAN_HPP_INLINE std::string to_string(SubpassContents value)
21142  {
21143 switch (value)
21144 {
21145 case SubpassContents::eInline: return "Inline";
21146 case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
21147 default: return "invalid";
21148 }
21149 }
21150
21151  VULKAN_HPP_INLINE std::string to_string(DynamicState value)
21152  {
21153 switch (value)
21154 {
21155 case DynamicState::eViewport: return "Viewport";
21156 case DynamicState::eScissor: return "Scissor";
21157 case DynamicState::eLineWidth: return "LineWidth";
21158 case DynamicState::eDepthBias: return "DepthBias";
21159 case DynamicState::eBlendConstants: return "BlendConstants";
21160 case DynamicState::eDepthBounds: return "DepthBounds";
21161 case DynamicState::eStencilCompareMask: return "StencilCompareMask";
21162 case DynamicState::eStencilWriteMask: return "StencilWriteMask";
21163 case DynamicState::eStencilReference: return "StencilReference";
21164 default: return "invalid";
21165 }
21166 }
21167
21168  VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value)
21169  {
21170 switch (value)
21171 {
21172 case QueueFlagBits::eGraphics: return "Graphics";
21173 case QueueFlagBits::eCompute: return "Compute";
21174 case QueueFlagBits::eTransfer: return "Transfer";
21175 case QueueFlagBits::eSparseBinding: return "SparseBinding";
21176 default: return "invalid";
21177 }
21178 }
21179
21180  VULKAN_HPP_INLINE std::string to_string(QueueFlags value)
21181  {
21182 if (!value) return "{}";
21183 std::string result;
21184 if (value & QueueFlagBits::eGraphics) result += "Graphics | ";
21185 if (value & QueueFlagBits::eCompute) result += "Compute | ";
21186 if (value & QueueFlagBits::eTransfer) result += "Transfer | ";
21187 if (value & QueueFlagBits::eSparseBinding) result += "SparseBinding | ";
21188 return "{" + result.substr(0, result.size() - 3) + "}";
21189 }
21190
21191  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value)
21192  {
21193 switch (value)
21194 {
21195 case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
21196 case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
21197 case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
21198 case MemoryPropertyFlagBits::eHostCached: return "HostCached";
21199 case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
21200 default: return "invalid";
21201 }
21202 }
21203
21204  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value)
21205  {
21206 if (!value) return "{}";
21207 std::string result;
21208 if (value & MemoryPropertyFlagBits::eDeviceLocal) result += "DeviceLocal | ";
21209 if (value & MemoryPropertyFlagBits::eHostVisible) result += "HostVisible | ";
21210 if (value & MemoryPropertyFlagBits::eHostCoherent) result += "HostCoherent | ";
21211 if (value & MemoryPropertyFlagBits::eHostCached) result += "HostCached | ";
21212 if (value & MemoryPropertyFlagBits::eLazilyAllocated) result += "LazilyAllocated | ";
21213 return "{" + result.substr(0, result.size() - 3) + "}";
21214 }
21215
21216  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value)
21217  {
21218 switch (value)
21219 {
21220 case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
21221 default: return "invalid";
21222 }
21223 }
21224
21225  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value)
21226  {
21227 if (!value) return "{}";
21228 std::string result;
21229 if (value & MemoryHeapFlagBits::eDeviceLocal) result += "DeviceLocal | ";
21230 return "{" + result.substr(0, result.size() - 3) + "}";
21231 }
21232
21233  VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value)
21234  {
21235 switch (value)
21236 {
21237 case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
21238 case AccessFlagBits::eIndexRead: return "IndexRead";
21239 case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
21240 case AccessFlagBits::eUniformRead: return "UniformRead";
21241 case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
21242 case AccessFlagBits::eShaderRead: return "ShaderRead";
21243 case AccessFlagBits::eShaderWrite: return "ShaderWrite";
21244 case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
21245 case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
21246 case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
21247 case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
21248 case AccessFlagBits::eTransferRead: return "TransferRead";
21249 case AccessFlagBits::eTransferWrite: return "TransferWrite";
21250 case AccessFlagBits::eHostRead: return "HostRead";
21251 case AccessFlagBits::eHostWrite: return "HostWrite";
21252 case AccessFlagBits::eMemoryRead: return "MemoryRead";
21253 case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
21254      case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX";
21255 case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX";
21256      default: return "invalid";
21257 }
21258 }
21259
21260  VULKAN_HPP_INLINE std::string to_string(AccessFlags value)
21261  {
21262 if (!value) return "{}";
21263 std::string result;
21264 if (value & AccessFlagBits::eIndirectCommandRead) result += "IndirectCommandRead | ";
21265 if (value & AccessFlagBits::eIndexRead) result += "IndexRead | ";
21266 if (value & AccessFlagBits::eVertexAttributeRead) result += "VertexAttributeRead | ";
21267 if (value & AccessFlagBits::eUniformRead) result += "UniformRead | ";
21268 if (value & AccessFlagBits::eInputAttachmentRead) result += "InputAttachmentRead | ";
21269 if (value & AccessFlagBits::eShaderRead) result += "ShaderRead | ";
21270 if (value & AccessFlagBits::eShaderWrite) result += "ShaderWrite | ";
21271 if (value & AccessFlagBits::eColorAttachmentRead) result += "ColorAttachmentRead | ";
21272 if (value & AccessFlagBits::eColorAttachmentWrite) result += "ColorAttachmentWrite | ";
21273 if (value & AccessFlagBits::eDepthStencilAttachmentRead) result += "DepthStencilAttachmentRead | ";
21274 if (value & AccessFlagBits::eDepthStencilAttachmentWrite) result += "DepthStencilAttachmentWrite | ";
21275 if (value & AccessFlagBits::eTransferRead) result += "TransferRead | ";
21276 if (value & AccessFlagBits::eTransferWrite) result += "TransferWrite | ";
21277 if (value & AccessFlagBits::eHostRead) result += "HostRead | ";
21278 if (value & AccessFlagBits::eHostWrite) result += "HostWrite | ";
21279 if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | ";
21280 if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | ";
21281    if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | ";
21282 if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | ";
21283    return "{" + result.substr(0, result.size() - 3) + "}";
21284 }
21285
21286  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value)
21287  {
21288 switch (value)
21289 {
21290 case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
21291 case BufferUsageFlagBits::eTransferDst: return "TransferDst";
21292 case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
21293 case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
21294 case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
21295 case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
21296 case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
21297 case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
21298 case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
21299 default: return "invalid";
21300 }
21301 }
21302
21303  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value)
21304  {
21305 if (!value) return "{}";
21306 std::string result;
21307 if (value & BufferUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
21308 if (value & BufferUsageFlagBits::eTransferDst) result += "TransferDst | ";
21309 if (value & BufferUsageFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
21310 if (value & BufferUsageFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
21311 if (value & BufferUsageFlagBits::eUniformBuffer) result += "UniformBuffer | ";
21312 if (value & BufferUsageFlagBits::eStorageBuffer) result += "StorageBuffer | ";
21313 if (value & BufferUsageFlagBits::eIndexBuffer) result += "IndexBuffer | ";
21314 if (value & BufferUsageFlagBits::eVertexBuffer) result += "VertexBuffer | ";
21315 if (value & BufferUsageFlagBits::eIndirectBuffer) result += "IndirectBuffer | ";
21316 return "{" + result.substr(0, result.size() - 3) + "}";
21317 }
21318
21319  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value)
21320  {
21321 switch (value)
21322 {
21323 case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
21324 case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
21325 case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
21326 default: return "invalid";
21327 }
21328 }
21329
21330  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value)
21331  {
21332 if (!value) return "{}";
21333 std::string result;
21334 if (value & BufferCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
21335 if (value & BufferCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
21336 if (value & BufferCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
21337 return "{" + result.substr(0, result.size() - 3) + "}";
21338 }
21339
21340  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value)
21341  {
21342 switch (value)
21343 {
21344 case ShaderStageFlagBits::eVertex: return "Vertex";
21345 case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
21346 case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
21347 case ShaderStageFlagBits::eGeometry: return "Geometry";
21348 case ShaderStageFlagBits::eFragment: return "Fragment";
21349 case ShaderStageFlagBits::eCompute: return "Compute";
21350 case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
21351 case ShaderStageFlagBits::eAll: return "All";
21352 default: return "invalid";
21353 }
21354 }
21355
21356  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value)
21357  {
21358 if (!value) return "{}";
21359 std::string result;
21360 if (value & ShaderStageFlagBits::eVertex) result += "Vertex | ";
21361 if (value & ShaderStageFlagBits::eTessellationControl) result += "TessellationControl | ";
21362 if (value & ShaderStageFlagBits::eTessellationEvaluation) result += "TessellationEvaluation | ";
21363 if (value & ShaderStageFlagBits::eGeometry) result += "Geometry | ";
21364 if (value & ShaderStageFlagBits::eFragment) result += "Fragment | ";
21365 if (value & ShaderStageFlagBits::eCompute) result += "Compute | ";
21366 if (value & ShaderStageFlagBits::eAllGraphics) result += "AllGraphics | ";
21367 if (value & ShaderStageFlagBits::eAll) result += "All | ";
21368 return "{" + result.substr(0, result.size() - 3) + "}";
21369 }
21370
21371  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value)
21372  {
21373 switch (value)
21374 {
21375 case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
21376 case ImageUsageFlagBits::eTransferDst: return "TransferDst";
21377 case ImageUsageFlagBits::eSampled: return "Sampled";
21378 case ImageUsageFlagBits::eStorage: return "Storage";
21379 case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
21380 case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
21381 case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
21382 case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
21383 default: return "invalid";
21384 }
21385 }
21386
21387  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value)
21388  {
21389 if (!value) return "{}";
21390 std::string result;
21391 if (value & ImageUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
21392 if (value & ImageUsageFlagBits::eTransferDst) result += "TransferDst | ";
21393 if (value & ImageUsageFlagBits::eSampled) result += "Sampled | ";
21394 if (value & ImageUsageFlagBits::eStorage) result += "Storage | ";
21395 if (value & ImageUsageFlagBits::eColorAttachment) result += "ColorAttachment | ";
21396 if (value & ImageUsageFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
21397 if (value & ImageUsageFlagBits::eTransientAttachment) result += "TransientAttachment | ";
21398 if (value & ImageUsageFlagBits::eInputAttachment) result += "InputAttachment | ";
21399 return "{" + result.substr(0, result.size() - 3) + "}";
21400 }
21401
21402  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value)
21403  {
21404 switch (value)
21405 {
21406 case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
21407 case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
21408 case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
21409 case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
21410 case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
21411 default: return "invalid";
21412 }
21413 }
21414
21415  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value)
21416  {
21417 if (!value) return "{}";
21418 std::string result;
21419 if (value & ImageCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
21420 if (value & ImageCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
21421 if (value & ImageCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
21422 if (value & ImageCreateFlagBits::eMutableFormat) result += "MutableFormat | ";
21423 if (value & ImageCreateFlagBits::eCubeCompatible) result += "CubeCompatible | ";
21424 return "{" + result.substr(0, result.size() - 3) + "}";
21425 }
21426
21427  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value)
21428  {
21429 switch (value)
21430 {
21431 case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
21432 case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
21433 case PipelineCreateFlagBits::eDerivative: return "Derivative";
21434 default: return "invalid";
21435 }
21436 }
21437
21438  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value)
21439  {
21440 if (!value) return "{}";
21441 std::string result;
21442 if (value & PipelineCreateFlagBits::eDisableOptimization) result += "DisableOptimization | ";
21443 if (value & PipelineCreateFlagBits::eAllowDerivatives) result += "AllowDerivatives | ";
21444 if (value & PipelineCreateFlagBits::eDerivative) result += "Derivative | ";
21445 return "{" + result.substr(0, result.size() - 3) + "}";
21446 }
21447
Mark Lobodzinski2d589822016-12-12 09:44:34 -070021448 VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021449 {
21450 switch (value)
21451 {
21452 case ColorComponentFlagBits::eR: return "R";
21453 case ColorComponentFlagBits::eG: return "G";
21454 case ColorComponentFlagBits::eB: return "B";
21455 case ColorComponentFlagBits::eA: return "A";
21456 default: return "invalid";
21457 }
21458 }
21459
Mark Lobodzinski2d589822016-12-12 09:44:34 -070021460 VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value)
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021461 {
21462 if (!value) return "{}";
21463 std::string result;
21464 if (value & ColorComponentFlagBits::eR) result += "R | ";
21465 if (value & ColorComponentFlagBits::eG) result += "G | ";
21466 if (value & ColorComponentFlagBits::eB) result += "B | ";
21467 if (value & ColorComponentFlagBits::eA) result += "A | ";
21468 return "{" + result.substr(0, result.size() - 3) + "}";
21469 }
21470
  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value)
  {
    switch (value)
    {
    case FenceCreateFlagBits::eSignaled: return "Signaled";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & FenceCreateFlagBits::eSignaled) result += "Signaled | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value)
  {
    switch (value)
    {
    case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
    case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
    case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
    case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
    case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
    case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
    case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
    case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
    case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
    case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
    case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
    case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
    case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
    case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & FormatFeatureFlagBits::eSampledImage) result += "SampledImage | ";
    if (value & FormatFeatureFlagBits::eStorageImage) result += "StorageImage | ";
    if (value & FormatFeatureFlagBits::eStorageImageAtomic) result += "StorageImageAtomic | ";
    if (value & FormatFeatureFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
    if (value & FormatFeatureFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
    if (value & FormatFeatureFlagBits::eStorageTexelBufferAtomic) result += "StorageTexelBufferAtomic | ";
    if (value & FormatFeatureFlagBits::eVertexBuffer) result += "VertexBuffer | ";
    if (value & FormatFeatureFlagBits::eColorAttachment) result += "ColorAttachment | ";
    if (value & FormatFeatureFlagBits::eColorAttachmentBlend) result += "ColorAttachmentBlend | ";
    if (value & FormatFeatureFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
    if (value & FormatFeatureFlagBits::eBlitSrc) result += "BlitSrc | ";
    if (value & FormatFeatureFlagBits::eBlitDst) result += "BlitDst | ";
    if (value & FormatFeatureFlagBits::eSampledImageFilterLinear) result += "SampledImageFilterLinear | ";
    if (value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG) result += "SampledImageFilterCubicIMG | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value)
  {
    switch (value)
    {
    case QueryControlFlagBits::ePrecise: return "Precise";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & QueryControlFlagBits::ePrecise) result += "Precise | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value)
  {
    switch (value)
    {
    case QueryResultFlagBits::e64: return "64";
    case QueryResultFlagBits::eWait: return "Wait";
    case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
    case QueryResultFlagBits::ePartial: return "Partial";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & QueryResultFlagBits::e64) result += "64 | ";
    if (value & QueryResultFlagBits::eWait) result += "Wait | ";
    if (value & QueryResultFlagBits::eWithAvailability) result += "WithAvailability | ";
    if (value & QueryResultFlagBits::ePartial) result += "Partial | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value)
  {
    switch (value)
    {
    case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
    case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
    case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CommandBufferUsageFlagBits::eOneTimeSubmit) result += "OneTimeSubmit | ";
    if (value & CommandBufferUsageFlagBits::eRenderPassContinue) result += "RenderPassContinue | ";
    if (value & CommandBufferUsageFlagBits::eSimultaneousUse) result += "SimultaneousUse | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value)
  {
    switch (value)
    {
    case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
    case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
    case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
    case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
    case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
    case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
    case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
    case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
    case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
    case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations";
    case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices) result += "InputAssemblyVertices | ";
    if (value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) result += "InputAssemblyPrimitives | ";
    if (value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations) result += "VertexShaderInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) result += "GeometryShaderInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) result += "GeometryShaderPrimitives | ";
    if (value & QueryPipelineStatisticFlagBits::eClippingInvocations) result += "ClippingInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eClippingPrimitives) result += "ClippingPrimitives | ";
    if (value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) result += "FragmentShaderInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) result += "TessellationControlShaderPatches | ";
    if (value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) result += "TessellationEvaluationShaderInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations) result += "ComputeShaderInvocations | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value)
  {
    switch (value)
    {
    case ImageAspectFlagBits::eColor: return "Color";
    case ImageAspectFlagBits::eDepth: return "Depth";
    case ImageAspectFlagBits::eStencil: return "Stencil";
    case ImageAspectFlagBits::eMetadata: return "Metadata";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ImageAspectFlagBits::eColor) result += "Color | ";
    if (value & ImageAspectFlagBits::eDepth) result += "Depth | ";
    if (value & ImageAspectFlagBits::eStencil) result += "Stencil | ";
    if (value & ImageAspectFlagBits::eMetadata) result += "Metadata | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value)
  {
    switch (value)
    {
    case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
    case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
    case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SparseImageFormatFlagBits::eSingleMiptail) result += "SingleMiptail | ";
    if (value & SparseImageFormatFlagBits::eAlignedMipSize) result += "AlignedMipSize | ";
    if (value & SparseImageFormatFlagBits::eNonstandardBlockSize) result += "NonstandardBlockSize | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value)
  {
    switch (value)
    {
    case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SparseMemoryBindFlagBits::eMetadata) result += "Metadata | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value)
  {
    switch (value)
    {
    case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
    case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
    case PipelineStageFlagBits::eVertexInput: return "VertexInput";
    case PipelineStageFlagBits::eVertexShader: return "VertexShader";
    case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
    case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
    case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
    case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
    case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
    case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
    case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
    case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
    case PipelineStageFlagBits::eTransfer: return "Transfer";
    case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
    case PipelineStageFlagBits::eHost: return "Host";
    case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
    case PipelineStageFlagBits::eAllCommands: return "AllCommands";
    case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & PipelineStageFlagBits::eTopOfPipe) result += "TopOfPipe | ";
    if (value & PipelineStageFlagBits::eDrawIndirect) result += "DrawIndirect | ";
    if (value & PipelineStageFlagBits::eVertexInput) result += "VertexInput | ";
    if (value & PipelineStageFlagBits::eVertexShader) result += "VertexShader | ";
    if (value & PipelineStageFlagBits::eTessellationControlShader) result += "TessellationControlShader | ";
    if (value & PipelineStageFlagBits::eTessellationEvaluationShader) result += "TessellationEvaluationShader | ";
    if (value & PipelineStageFlagBits::eGeometryShader) result += "GeometryShader | ";
    if (value & PipelineStageFlagBits::eFragmentShader) result += "FragmentShader | ";
    if (value & PipelineStageFlagBits::eEarlyFragmentTests) result += "EarlyFragmentTests | ";
    if (value & PipelineStageFlagBits::eLateFragmentTests) result += "LateFragmentTests | ";
    if (value & PipelineStageFlagBits::eColorAttachmentOutput) result += "ColorAttachmentOutput | ";
    if (value & PipelineStageFlagBits::eComputeShader) result += "ComputeShader | ";
    if (value & PipelineStageFlagBits::eTransfer) result += "Transfer | ";
    if (value & PipelineStageFlagBits::eBottomOfPipe) result += "BottomOfPipe | ";
    if (value & PipelineStageFlagBits::eHost) result += "Host | ";
    if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | ";
    if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | ";
    if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value)
  {
    switch (value)
    {
    case CommandPoolCreateFlagBits::eTransient: return "Transient";
    case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CommandPoolCreateFlagBits::eTransient) result += "Transient | ";
    if (value & CommandPoolCreateFlagBits::eResetCommandBuffer) result += "ResetCommandBuffer | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value)
  {
    switch (value)
    {
    case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CommandPoolResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value)
  {
    switch (value)
    {
    case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CommandBufferResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value)
  {
    switch (value)
    {
    case SampleCountFlagBits::e1: return "1";
    case SampleCountFlagBits::e2: return "2";
    case SampleCountFlagBits::e4: return "4";
    case SampleCountFlagBits::e8: return "8";
    case SampleCountFlagBits::e16: return "16";
    case SampleCountFlagBits::e32: return "32";
    case SampleCountFlagBits::e64: return "64";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SampleCountFlagBits::e1) result += "1 | ";
    if (value & SampleCountFlagBits::e2) result += "2 | ";
    if (value & SampleCountFlagBits::e4) result += "4 | ";
    if (value & SampleCountFlagBits::e8) result += "8 | ";
    if (value & SampleCountFlagBits::e16) result += "16 | ";
    if (value & SampleCountFlagBits::e32) result += "32 | ";
    if (value & SampleCountFlagBits::e64) result += "64 | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value)
  {
    switch (value)
    {
    case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & AttachmentDescriptionFlagBits::eMayAlias) result += "MayAlias | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value)
  {
    switch (value)
    {
    case StencilFaceFlagBits::eFront: return "Front";
    case StencilFaceFlagBits::eBack: return "Back";
    case StencilFaceFlagBits::eVkStencilFrontAndBack: return "VkStencilFrontAndBack";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & StencilFaceFlagBits::eFront) result += "Front | ";
    if (value & StencilFaceFlagBits::eBack) result += "Back | ";
    if (value & StencilFaceFlagBits::eVkStencilFrontAndBack) result += "VkStencilFrontAndBack | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value)
  {
    switch (value)
    {
    case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet) result += "FreeDescriptorSet | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value)
  {
    switch (value)
    {
    case DependencyFlagBits::eByRegion: return "ByRegion";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DependencyFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & DependencyFlagBits::eByRegion) result += "ByRegion | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value)
  {
    switch (value)
    {
    case PresentModeKHR::eImmediate: return "Immediate";
    case PresentModeKHR::eMailbox: return "Mailbox";
    case PresentModeKHR::eFifo: return "Fifo";
    case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value)
  {
    switch (value)
    {
    case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
  {
    switch (value)
    {
    case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
    case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
    case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
    case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & DisplayPlaneAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
    if (value & DisplayPlaneAlphaFlagBitsKHR::eGlobal) result += "Global | ";
    if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel) result += "PerPixel | ";
    if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) result += "PerPixelPremultiplied | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value)
  {
    switch (value)
    {
    case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
    case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
    case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
    case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CompositeAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
    if (value & CompositeAlphaFlagBitsKHR::ePreMultiplied) result += "PreMultiplied | ";
    if (value & CompositeAlphaFlagBitsKHR::ePostMultiplied) result += "PostMultiplied | ";
    if (value & CompositeAlphaFlagBitsKHR::eInherit) result += "Inherit | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value)
  {
    switch (value)
    {
    case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
    case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
    case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
    case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
    case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
    case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SurfaceTransformFlagBitsKHR::eIdentity) result += "Identity | ";
    if (value & SurfaceTransformFlagBitsKHR::eRotate90) result += "Rotate90 | ";
    if (value & SurfaceTransformFlagBitsKHR::eRotate180) result += "Rotate180 | ";
    if (value & SurfaceTransformFlagBitsKHR::eRotate270) result += "Rotate270 | ";
    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirror) result += "HorizontalMirror | ";
    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) result += "HorizontalMirrorRotate90 | ";
    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) result += "HorizontalMirrorRotate180 | ";
    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) result += "HorizontalMirrorRotate270 | ";
    if (value & SurfaceTransformFlagBitsKHR::eInherit) result += "Inherit | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value)
  {
    switch (value)
    {
    case DebugReportFlagBitsEXT::eInformation: return "Information";
    case DebugReportFlagBitsEXT::eWarning: return "Warning";
    case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
    case DebugReportFlagBitsEXT::eError: return "Error";
    case DebugReportFlagBitsEXT::eDebug: return "Debug";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & DebugReportFlagBitsEXT::eInformation) result += "Information | ";
    if (value & DebugReportFlagBitsEXT::eWarning) result += "Warning | ";
    if (value & DebugReportFlagBitsEXT::ePerformanceWarning) result += "PerformanceWarning | ";
    if (value & DebugReportFlagBitsEXT::eError) result += "Error | ";
    if (value & DebugReportFlagBitsEXT::eDebug) result += "Debug | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }
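
  // Usage sketch (illustrative comment only, not part of the generated registry output):
  // inside a hypothetical VK_EXT_debug_report callback, the flags overload yields a readable
  // severity list; parameter names below follow the usual callback signature and are assumptions:
  //   std::cerr << vk::to_string(static_cast<vk::DebugReportFlagsEXT>(flags)) << ": " << pMessage << '\n';
  //   // e.g. prints "{Warning | PerformanceWarning}: <message>"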

  VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value)
  {
    switch (value)
    {
    case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
    case DebugReportObjectTypeEXT::eInstance: return "Instance";
    case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
    case DebugReportObjectTypeEXT::eDevice: return "Device";
    case DebugReportObjectTypeEXT::eQueue: return "Queue";
    case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
    case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
    case DebugReportObjectTypeEXT::eFence: return "Fence";
    case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
    case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
    case DebugReportObjectTypeEXT::eImage: return "Image";
    case DebugReportObjectTypeEXT::eEvent: return "Event";
    case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
    case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
    case DebugReportObjectTypeEXT::eImageView: return "ImageView";
    case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
    case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
    case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
    case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
    case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
    case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
    case DebugReportObjectTypeEXT::eSampler: return "Sampler";
    case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
    case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
    case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
    case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
    case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr";
    case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr";
    case DebugReportObjectTypeEXT::eDebugReport: return "DebugReport";
    case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr";
    case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr";
    case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx";
    case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DebugReportErrorEXT value)
  {
    switch (value)
    {
    case DebugReportErrorEXT::eNone: return "None";
    case DebugReportErrorEXT::eCallbackRef: return "CallbackRef";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value)
  {
    switch (value)
    {
    case RasterizationOrderAMD::eStrict: return "Strict";
    case RasterizationOrderAMD::eRelaxed: return "Relaxed";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
  {
    switch (value)
    {
    case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
    case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
    case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
    case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) result += "OpaqueWin32 | ";
    if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
    if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) result += "D3D11Image | ";
    if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) result += "D3D11ImageKmt | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
  {
    switch (value)
    {
    case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
    case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
    case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) result += "DedicatedOnly | ";
    if (value & ExternalMemoryFeatureFlagBitsNV::eExportable) result += "Exportable | ";
    if (value & ExternalMemoryFeatureFlagBitsNV::eImportable) result += "Importable | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value)
  {
    switch (value)
    {
    case ValidationCheckEXT::eAll: return "All";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value)
  {
    switch (value)
    {
    case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences";
    case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences";
    case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions";
    case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | ";
    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | ";
    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | ";
    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value)
  {
    switch (value)
    {
    case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics";
    case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | ";
    if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value)
  {
    switch (value)
    {
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline: return "VkIndirectCommandsTokenPipeline";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDescriptorSet: return "VkIndirectCommandsTokenDescriptorSet";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenIndexBuffer: return "VkIndirectCommandsTokenIndexBuffer";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenVertexBuffer: return "VkIndirectCommandsTokenVertexBuffer";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPushConstant: return "VkIndirectCommandsTokenPushConstant";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDrawIndexed: return "VkIndirectCommandsTokenDrawIndexed";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDraw: return "VkIndirectCommandsTokenDraw";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDispatch: return "VkIndirectCommandsTokenDispatch";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value)
  {
    switch (value)
    {
    case ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet: return "VkObjectEntryDescriptorSet";
    case ObjectEntryTypeNVX::eVkObjectEntryPipeline: return "VkObjectEntryPipeline";
    case ObjectEntryTypeNVX::eVkObjectEntryIndexBuffer: return "VkObjectEntryIndexBuffer";
    case ObjectEntryTypeNVX::eVkObjectEntryVertexBuffer: return "VkObjectEntryVertexBuffer";
    case ObjectEntryTypeNVX::eVkObjectEntryPushConstant: return "VkObjectEntryPushConstant";
    default: return "invalid";
    }
  }

} // namespace vk

#endif