// Copyright (c) 2015-2017 The Khronos Group Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and/or associated documentation files (the
// "Materials"), to deal in the Materials without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Materials, and to
// permit persons to whom the Materials are furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Materials.
//
// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.

// This header is generated from the Khronos Vulkan XML API Registry.


#ifndef VULKAN_HPP
#define VULKAN_HPP

#include <algorithm>
#include <array>
#include <cassert>
#include <cstdint>
#include <cstring>
#include <initializer_list>
#include <string>
#include <system_error>
#include <tuple>
#include <type_traits>
#include <vulkan/vulkan.h>
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
# include <memory>
# include <vector>
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

static_assert( VK_HEADER_VERSION == 39 , "Wrong VK_HEADER_VERSION!" );

// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
#define VULKAN_HPP_TYPESAFE_CONVERSION 1
#endif
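
// Usage sketch (illustrative): with VULKAN_HPP_TYPESAFE_CONVERSION defined (the default on the
// 64-bit platforms detected above), the handle wrappers further down can be constructed from and
// assigned their raw C handles, e.g.
//   VkBuffer raw = /* obtained from the C API */;
//   vk::Buffer buffer = raw;                        // needs VULKAN_HPP_TYPESAFE_CONVERSION
// Without the define, converting back to the C handle requires an explicit cast:
//   VkBuffer raw2 = static_cast<VkBuffer>(buffer);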

#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
# if defined(__clang__)
#  if __has_feature(cxx_unrestricted_unions)
#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#  endif
# elif defined(__GNUC__)
#  define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
#  if 40600 <= GCC_VERSION
#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#  endif
# elif defined(_MSC_VER)
#  if 1900 <= _MSC_VER
#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#  endif
# endif
#endif


#if !defined(VULKAN_HPP_INLINE)
# if defined(__clang__)
#  if __has_attribute(always_inline)
#   define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
#  else
#   define VULKAN_HPP_INLINE inline
#  endif
# elif defined(__GNUC__)
#  define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
# elif defined(_MSC_VER)
#  define VULKAN_HPP_INLINE __forceinline
# else
#  define VULKAN_HPP_INLINE inline
# endif
#endif

namespace vk
{
  template <typename FlagBitsType> struct FlagTraits
  {
    enum { allFlags = 0 };
  };

  template <typename BitType, typename MaskType = VkFlags>
  class Flags
  {
  public:
    Flags()
      : m_mask(0)
    {
    }

    Flags(BitType bit)
      : m_mask(static_cast<MaskType>(bit))
    {
    }

    Flags(Flags<BitType> const& rhs)
      : m_mask(rhs.m_mask)
    {
    }

    Flags<BitType> & operator=(Flags<BitType> const& rhs)
    {
      m_mask = rhs.m_mask;
      return *this;
    }

    Flags<BitType> & operator|=(Flags<BitType> const& rhs)
    {
      m_mask |= rhs.m_mask;
      return *this;
    }

    Flags<BitType> & operator&=(Flags<BitType> const& rhs)
    {
      m_mask &= rhs.m_mask;
      return *this;
    }

    Flags<BitType> & operator^=(Flags<BitType> const& rhs)
    {
      m_mask ^= rhs.m_mask;
      return *this;
    }

    Flags<BitType> operator|(Flags<BitType> const& rhs) const
    {
      Flags<BitType> result(*this);
      result |= rhs;
      return result;
    }

    Flags<BitType> operator&(Flags<BitType> const& rhs) const
    {
      Flags<BitType> result(*this);
      result &= rhs;
      return result;
    }

    Flags<BitType> operator^(Flags<BitType> const& rhs) const
    {
      Flags<BitType> result(*this);
      result ^= rhs;
      return result;
    }

    bool operator!() const
    {
      return !m_mask;
    }

    Flags<BitType> operator~() const
    {
      Flags<BitType> result(*this);
      result.m_mask ^= FlagTraits<BitType>::allFlags;
      return result;
    }

    bool operator==(Flags<BitType> const& rhs) const
    {
      return m_mask == rhs.m_mask;
    }

    bool operator!=(Flags<BitType> const& rhs) const
    {
      return m_mask != rhs.m_mask;
    }

    explicit operator bool() const
    {
      return !!m_mask;
    }

    explicit operator MaskType() const
    {
      return m_mask;
    }

  private:
    MaskType m_mask;
  };

  template <typename BitType>
  Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags)
  {
    return flags | bit;
  }

  template <typename BitType>
  Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags)
  {
    return flags & bit;
  }

  template <typename BitType>
  Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags)
  {
    return flags ^ bit;
  }
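
  // Usage sketch (illustrative): Flags<BitType> wraps a VkFlags mask in a type-safe bitmask, so
  // bits of different enums cannot be mixed. With a populated bit enum such as ImageUsageFlagBits
  // (declared further down in this header):
  //   vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eTransferSrc | vk::ImageUsageFlagBits::eSampled;
  //   if (usage & vk::ImageUsageFlagBits::eSampled) { /* bit is set */ }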


  template <typename RefType>
  class Optional
  {
  public:
    Optional(RefType & reference) { m_ptr = &reference; }
    Optional(RefType * ptr) { m_ptr = ptr; }
    Optional(std::nullptr_t) { m_ptr = nullptr; }

    operator RefType*() const { return m_ptr; }
    RefType const* operator->() const { return m_ptr; }
    explicit operator bool() const { return !!m_ptr; }

  private:
    RefType *m_ptr;
  };
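
  // Optional<RefType> is a nullable reference used for optional parameters such as allocation
  // callbacks: it can be built from a reference, a pointer, or nullptr, and decays to RefType*.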

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename T>
  class ArrayProxy
  {
  public:
    ArrayProxy(std::nullptr_t)
      : m_count(0)
      , m_ptr(nullptr)
    {}

    ArrayProxy(T & ptr)
      : m_count(1)
      , m_ptr(&ptr)
    {}

    ArrayProxy(uint32_t count, T * ptr)
      : m_count(count)
      , m_ptr(ptr)
    {}

    template <size_t N>
    ArrayProxy(std::array<typename std::remove_const<T>::type, N> & data)
      : m_count(N)
      , m_ptr(data.data())
    {}

    template <size_t N>
    ArrayProxy(std::array<typename std::remove_const<T>::type, N> const& data)
      : m_count(N)
      , m_ptr(data.data())
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> & data)
      : m_count(static_cast<uint32_t>(data.size()))
      , m_ptr(data.data())
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> const& data)
      : m_count(static_cast<uint32_t>(data.size()))
      , m_ptr(data.data())
    {}

    ArrayProxy(std::initializer_list<T> const& data)
      : m_count(static_cast<uint32_t>(data.end() - data.begin()))
      , m_ptr(data.begin())
    {}

    const T * begin() const
    {
      return m_ptr;
    }

    const T * end() const
    {
      return m_ptr + m_count;
    }

    const T & front() const
    {
      assert(m_count && m_ptr);
      return *m_ptr;
    }

    const T & back() const
    {
      assert(m_count && m_ptr);
      return *(m_ptr + m_count - 1);
    }

    bool empty() const
    {
      return (m_count == 0);
    }

    uint32_t size() const
    {
      return m_count;
    }

    T * data() const
    {
      return m_ptr;
    }

  private:
    uint32_t m_count;
    T * m_ptr;
  };
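
  // Usage sketch (illustrative, assuming a hypothetical function fill(vk::ArrayProxy<const uint32_t>)):
  //   std::vector<uint32_t> values = { 1, 2, 3 };
  //   fill(values);    // wraps values.data() / values.size() without copying
  //   fill(7);         // a single value is passed as a one-element array
  //   fill(nullptr);   // nullptr yields an empty proxy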
#endif

  enum class Result
  {
    eSuccess = VK_SUCCESS,
    eNotReady = VK_NOT_READY,
    eTimeout = VK_TIMEOUT,
    eEventSet = VK_EVENT_SET,
    eEventReset = VK_EVENT_RESET,
    eIncomplete = VK_INCOMPLETE,
    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR
  };

  VULKAN_HPP_INLINE std::string to_string(Result value)
  {
    switch (value)
    {
    case Result::eSuccess: return "Success";
    case Result::eNotReady: return "NotReady";
    case Result::eTimeout: return "Timeout";
    case Result::eEventSet: return "EventSet";
    case Result::eEventReset: return "EventReset";
    case Result::eIncomplete: return "Incomplete";
    case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
    case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
    case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
    case Result::eErrorDeviceLost: return "ErrorDeviceLost";
    case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
    case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
    case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
    case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
    case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
    case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
    case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
    case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
    case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
    case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
    case Result::eSuboptimalKHR: return "SuboptimalKHR";
    case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
    case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
    case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
    case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
    case Result::eErrorOutOfPoolMemoryKHR: return "ErrorOutOfPoolMemoryKHR";
    default: return "invalid";
    }
  }

#if defined(_MSC_VER) && (_MSC_VER == 1800)
# define noexcept _NOEXCEPT
#endif

  class ErrorCategoryImpl : public std::error_category
  {
  public:
    virtual const char* name() const noexcept override { return "vk::Result"; }
    virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
  };

#if defined(_MSC_VER) && (_MSC_VER == 1800)
# undef noexcept
#endif

  VULKAN_HPP_INLINE const std::error_category& errorCategory()
  {
    static ErrorCategoryImpl instance;
    return instance;
  }

  VULKAN_HPP_INLINE std::error_code make_error_code(Result e)
  {
    return std::error_code(static_cast<int>(e), errorCategory());
  }

  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e)
  {
    return std::error_condition(static_cast<int>(e), errorCategory());
  }

} // namespace vk

namespace std
{
  template <>
  struct is_error_code_enum<vk::Result> : public true_type
  {};
}
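
// Illustrative consequence of the specialization above: a vk::Result converts directly to a
// std::error_code, e.g.
//   std::error_code ec = vk::Result::eErrorDeviceLost;
//   assert(std::string(ec.category().name()) == "vk::Result");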

namespace vk
{
  template <typename T>
  struct ResultValue
  {
    ResultValue( Result r, T & v )
      : result( r )
      , value( v )
    {}

    Result result;
    T value;

    operator std::tuple<Result&, T&>() { return std::tuple<Result&, T&>(result, value); }
  };

  template <typename T>
  struct ResultValueType
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    typedef ResultValue<T> type;
#else
    typedef T type;
#endif
  };

  template <> struct ResultValueType<void>
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    typedef Result type;
#else
    typedef void type;
#endif
  };

  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    assert( result == Result::eSuccess );
    return result;
#else
    if ( result != Result::eSuccess )
    {
      throw std::system_error( result, message );
    }
#endif
  }

  template <typename T>
  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    assert( result == Result::eSuccess );
    return ResultValue<T>( result, data );
#else
    if ( result != Result::eSuccess )
    {
      throw std::system_error( result, message );
    }
    return data;
#endif
  }

  VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#else
    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
    {
      throw std::system_error( result, message );
    }
#endif
    return result;
  }

  template <typename T>
  VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#else
    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
    {
      throw std::system_error( result, message );
    }
#endif
    return ResultValue<T>( result, data );
  }
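
  // Behavior sketch: the createResultValue overloads above funnel every API return code through one
  // policy. With exceptions enabled, a result that is not eSuccess (or not in the given success-code
  // list) throws std::system_error; with VULKAN_HPP_NO_EXCEPTIONS it is only asserted on and handed
  // back to the caller as Result or ResultValue<T>.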

  using SampleMask = uint32_t;

  using Bool32 = uint32_t;

  using DeviceSize = uint64_t;

  enum class FramebufferCreateFlagBits
  {
  };

  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;

  VULKAN_HPP_INLINE FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 )
  {
    return FramebufferCreateFlags( bit0 ) | bit1;
  }

  enum class QueryPoolCreateFlagBits
  {
  };

  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;

  VULKAN_HPP_INLINE QueryPoolCreateFlags operator|( QueryPoolCreateFlagBits bit0, QueryPoolCreateFlagBits bit1 )
  {
    return QueryPoolCreateFlags( bit0 ) | bit1;
  }

  enum class RenderPassCreateFlagBits
  {
  };

  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;

  VULKAN_HPP_INLINE RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 )
  {
    return RenderPassCreateFlags( bit0 ) | bit1;
  }

  enum class SamplerCreateFlagBits
  {
  };

  using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;

  VULKAN_HPP_INLINE SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 )
  {
    return SamplerCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineLayoutCreateFlagBits
  {
  };

  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;

  VULKAN_HPP_INLINE PipelineLayoutCreateFlags operator|( PipelineLayoutCreateFlagBits bit0, PipelineLayoutCreateFlagBits bit1 )
  {
    return PipelineLayoutCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineCacheCreateFlagBits
  {
  };

  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;

  VULKAN_HPP_INLINE PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 )
  {
    return PipelineCacheCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineDepthStencilStateCreateFlagBits
  {
  };

  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineDepthStencilStateCreateFlags operator|( PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 )
  {
    return PipelineDepthStencilStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineDynamicStateCreateFlagBits
  {
  };

  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineDynamicStateCreateFlags operator|( PipelineDynamicStateCreateFlagBits bit0, PipelineDynamicStateCreateFlagBits bit1 )
  {
    return PipelineDynamicStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineColorBlendStateCreateFlagBits
  {
  };

  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineColorBlendStateCreateFlags operator|( PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 )
  {
    return PipelineColorBlendStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineMultisampleStateCreateFlagBits
  {
  };

  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineMultisampleStateCreateFlags operator|( PipelineMultisampleStateCreateFlagBits bit0, PipelineMultisampleStateCreateFlagBits bit1 )
  {
    return PipelineMultisampleStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineRasterizationStateCreateFlagBits
  {
  };

  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineRasterizationStateCreateFlags operator|( PipelineRasterizationStateCreateFlagBits bit0, PipelineRasterizationStateCreateFlagBits bit1 )
  {
    return PipelineRasterizationStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineViewportStateCreateFlagBits
  {
  };

  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineViewportStateCreateFlags operator|( PipelineViewportStateCreateFlagBits bit0, PipelineViewportStateCreateFlagBits bit1 )
  {
    return PipelineViewportStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineTessellationStateCreateFlagBits
  {
  };

  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineTessellationStateCreateFlags operator|( PipelineTessellationStateCreateFlagBits bit0, PipelineTessellationStateCreateFlagBits bit1 )
  {
    return PipelineTessellationStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineInputAssemblyStateCreateFlagBits
  {
  };

  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineInputAssemblyStateCreateFlags operator|( PipelineInputAssemblyStateCreateFlagBits bit0, PipelineInputAssemblyStateCreateFlagBits bit1 )
  {
    return PipelineInputAssemblyStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineVertexInputStateCreateFlagBits
  {
  };

  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;

  VULKAN_HPP_INLINE PipelineVertexInputStateCreateFlags operator|( PipelineVertexInputStateCreateFlagBits bit0, PipelineVertexInputStateCreateFlagBits bit1 )
  {
    return PipelineVertexInputStateCreateFlags( bit0 ) | bit1;
  }

  enum class PipelineShaderStageCreateFlagBits
  {
  };

  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;

  VULKAN_HPP_INLINE PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 )
  {
    return PipelineShaderStageCreateFlags( bit0 ) | bit1;
  }

  enum class DescriptorSetLayoutCreateFlagBits
  {
  };

  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;

  VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
  {
    return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
  }

  enum class BufferViewCreateFlagBits
  {
  };

  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;

  VULKAN_HPP_INLINE BufferViewCreateFlags operator|( BufferViewCreateFlagBits bit0, BufferViewCreateFlagBits bit1 )
  {
    return BufferViewCreateFlags( bit0 ) | bit1;
  }

  enum class InstanceCreateFlagBits
  {
  };

  using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;

  VULKAN_HPP_INLINE InstanceCreateFlags operator|( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 )
  {
    return InstanceCreateFlags( bit0 ) | bit1;
  }

  enum class DeviceCreateFlagBits
  {
  };

  using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;

  VULKAN_HPP_INLINE DeviceCreateFlags operator|( DeviceCreateFlagBits bit0, DeviceCreateFlagBits bit1 )
  {
    return DeviceCreateFlags( bit0 ) | bit1;
  }

  enum class DeviceQueueCreateFlagBits
  {
  };

  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;

  VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
  {
    return DeviceQueueCreateFlags( bit0 ) | bit1;
  }

  enum class ImageViewCreateFlagBits
  {
  };

  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;

  VULKAN_HPP_INLINE ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 )
  {
    return ImageViewCreateFlags( bit0 ) | bit1;
  }

  enum class SemaphoreCreateFlagBits
  {
  };

  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;

  VULKAN_HPP_INLINE SemaphoreCreateFlags operator|( SemaphoreCreateFlagBits bit0, SemaphoreCreateFlagBits bit1 )
  {
    return SemaphoreCreateFlags( bit0 ) | bit1;
  }

  enum class ShaderModuleCreateFlagBits
  {
  };

  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;

  VULKAN_HPP_INLINE ShaderModuleCreateFlags operator|( ShaderModuleCreateFlagBits bit0, ShaderModuleCreateFlagBits bit1 )
  {
    return ShaderModuleCreateFlags( bit0 ) | bit1;
  }

  enum class EventCreateFlagBits
  {
  };

  using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;

  VULKAN_HPP_INLINE EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 )
  {
    return EventCreateFlags( bit0 ) | bit1;
  }

  enum class MemoryMapFlagBits
  {
  };

  using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;

  VULKAN_HPP_INLINE MemoryMapFlags operator|( MemoryMapFlagBits bit0, MemoryMapFlagBits bit1 )
  {
    return MemoryMapFlags( bit0 ) | bit1;
  }

  enum class SubpassDescriptionFlagBits
  {
  };

  using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;

  VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
  {
    return SubpassDescriptionFlags( bit0 ) | bit1;
  }

  enum class DescriptorPoolResetFlagBits
  {
  };

  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;

  VULKAN_HPP_INLINE DescriptorPoolResetFlags operator|( DescriptorPoolResetFlagBits bit0, DescriptorPoolResetFlagBits bit1 )
  {
    return DescriptorPoolResetFlags( bit0 ) | bit1;
  }

  enum class SwapchainCreateFlagBitsKHR
  {
  };

  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;

  VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
  {
    return SwapchainCreateFlagsKHR( bit0 ) | bit1;
  }

  enum class DisplayModeCreateFlagBitsKHR
  {
  };

  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;

  VULKAN_HPP_INLINE DisplayModeCreateFlagsKHR operator|( DisplayModeCreateFlagBitsKHR bit0, DisplayModeCreateFlagBitsKHR bit1 )
  {
    return DisplayModeCreateFlagsKHR( bit0 ) | bit1;
  }

  enum class DisplaySurfaceCreateFlagBitsKHR
  {
  };

  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;

  VULKAN_HPP_INLINE DisplaySurfaceCreateFlagsKHR operator|( DisplaySurfaceCreateFlagBitsKHR bit0, DisplaySurfaceCreateFlagBitsKHR bit1 )
  {
    return DisplaySurfaceCreateFlagsKHR( bit0 ) | bit1;
  }

#ifdef VK_USE_PLATFORM_ANDROID_KHR
  enum class AndroidSurfaceCreateFlagBitsKHR
  {
  };
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#ifdef VK_USE_PLATFORM_ANDROID_KHR
  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;

  VULKAN_HPP_INLINE AndroidSurfaceCreateFlagsKHR operator|( AndroidSurfaceCreateFlagBitsKHR bit0, AndroidSurfaceCreateFlagBitsKHR bit1 )
  {
    return AndroidSurfaceCreateFlagsKHR( bit0 ) | bit1;
  }
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#ifdef VK_USE_PLATFORM_MIR_KHR
  enum class MirSurfaceCreateFlagBitsKHR
  {
  };
#endif /*VK_USE_PLATFORM_MIR_KHR*/

#ifdef VK_USE_PLATFORM_MIR_KHR
  using MirSurfaceCreateFlagsKHR = Flags<MirSurfaceCreateFlagBitsKHR, VkMirSurfaceCreateFlagsKHR>;

  VULKAN_HPP_INLINE MirSurfaceCreateFlagsKHR operator|( MirSurfaceCreateFlagBitsKHR bit0, MirSurfaceCreateFlagBitsKHR bit1 )
  {
    return MirSurfaceCreateFlagsKHR( bit0 ) | bit1;
  }
#endif /*VK_USE_PLATFORM_MIR_KHR*/

#ifdef VK_USE_PLATFORM_VI_NN
  enum class ViSurfaceCreateFlagBitsNN
  {
  };
#endif /*VK_USE_PLATFORM_VI_NN*/

#ifdef VK_USE_PLATFORM_VI_NN
  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN, VkViSurfaceCreateFlagsNN>;

  VULKAN_HPP_INLINE ViSurfaceCreateFlagsNN operator|( ViSurfaceCreateFlagBitsNN bit0, ViSurfaceCreateFlagBitsNN bit1 )
  {
    return ViSurfaceCreateFlagsNN( bit0 ) | bit1;
  }
#endif /*VK_USE_PLATFORM_VI_NN*/

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
  enum class WaylandSurfaceCreateFlagBitsKHR
  {
  };
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;

  VULKAN_HPP_INLINE WaylandSurfaceCreateFlagsKHR operator|( WaylandSurfaceCreateFlagBitsKHR bit0, WaylandSurfaceCreateFlagBitsKHR bit1 )
  {
    return WaylandSurfaceCreateFlagsKHR( bit0 ) | bit1;
  }
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#ifdef VK_USE_PLATFORM_WIN32_KHR
  enum class Win32SurfaceCreateFlagBitsKHR
  {
  };
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#ifdef VK_USE_PLATFORM_WIN32_KHR
  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;

  VULKAN_HPP_INLINE Win32SurfaceCreateFlagsKHR operator|( Win32SurfaceCreateFlagBitsKHR bit0, Win32SurfaceCreateFlagBitsKHR bit1 )
  {
    return Win32SurfaceCreateFlagsKHR( bit0 ) | bit1;
  }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#ifdef VK_USE_PLATFORM_XLIB_KHR
  enum class XlibSurfaceCreateFlagBitsKHR
  {
  };
#endif /*VK_USE_PLATFORM_XLIB_KHR*/

#ifdef VK_USE_PLATFORM_XLIB_KHR
  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;

  VULKAN_HPP_INLINE XlibSurfaceCreateFlagsKHR operator|( XlibSurfaceCreateFlagBitsKHR bit0, XlibSurfaceCreateFlagBitsKHR bit1 )
  {
    return XlibSurfaceCreateFlagsKHR( bit0 ) | bit1;
  }
#endif /*VK_USE_PLATFORM_XLIB_KHR*/

#ifdef VK_USE_PLATFORM_XCB_KHR
  enum class XcbSurfaceCreateFlagBitsKHR
  {
  };
#endif /*VK_USE_PLATFORM_XCB_KHR*/

#ifdef VK_USE_PLATFORM_XCB_KHR
  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;

  VULKAN_HPP_INLINE XcbSurfaceCreateFlagsKHR operator|( XcbSurfaceCreateFlagBitsKHR bit0, XcbSurfaceCreateFlagBitsKHR bit1 )
  {
    return XcbSurfaceCreateFlagsKHR( bit0 ) | bit1;
  }
#endif /*VK_USE_PLATFORM_XCB_KHR*/

  enum class CommandPoolTrimFlagBitsKHR
  {
  };

  using CommandPoolTrimFlagsKHR = Flags<CommandPoolTrimFlagBitsKHR, VkCommandPoolTrimFlagsKHR>;

  VULKAN_HPP_INLINE CommandPoolTrimFlagsKHR operator|( CommandPoolTrimFlagBitsKHR bit0, CommandPoolTrimFlagBitsKHR bit1 )
  {
    return CommandPoolTrimFlagsKHR( bit0 ) | bit1;
  }

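  // The classes below are thin wrappers around the corresponding Vk* handles: each holds exactly one
  // handle (checked by the static_asserts), default-constructs to VK_NULL_HANDLE, and supports
  // comparison and boolean tests. Usage sketch (illustrative):
  //   vk::Fence fence;                 // null handle
  //   if (!fence) { /* not created yet */ }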
  class DeviceMemory
  {
  public:
    DeviceMemory()
      : m_deviceMemory(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    DeviceMemory(VkDeviceMemory deviceMemory)
      : m_deviceMemory(deviceMemory)
    {}

    DeviceMemory& operator=(VkDeviceMemory deviceMemory)
    {
      m_deviceMemory = deviceMemory;
      return *this;
    }
#endif

    bool operator==(DeviceMemory const &rhs) const
    {
      return m_deviceMemory == rhs.m_deviceMemory;
    }

    bool operator!=(DeviceMemory const &rhs) const
    {
      return m_deviceMemory != rhs.m_deviceMemory;
    }

    bool operator<(DeviceMemory const &rhs) const
    {
      return m_deviceMemory < rhs.m_deviceMemory;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkDeviceMemory() const
    {
      return m_deviceMemory;
    }

    explicit operator bool() const
    {
      return m_deviceMemory != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_deviceMemory == VK_NULL_HANDLE;
    }

  private:
    VkDeviceMemory m_deviceMemory;
  };
  static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );

  class CommandPool
  {
  public:
    CommandPool()
      : m_commandPool(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    CommandPool(VkCommandPool commandPool)
      : m_commandPool(commandPool)
    {}

    CommandPool& operator=(VkCommandPool commandPool)
    {
      m_commandPool = commandPool;
      return *this;
    }
#endif

    bool operator==(CommandPool const &rhs) const
    {
      return m_commandPool == rhs.m_commandPool;
    }

    bool operator!=(CommandPool const &rhs) const
    {
      return m_commandPool != rhs.m_commandPool;
    }

    bool operator<(CommandPool const &rhs) const
    {
      return m_commandPool < rhs.m_commandPool;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkCommandPool() const
    {
      return m_commandPool;
    }

    explicit operator bool() const
    {
      return m_commandPool != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_commandPool == VK_NULL_HANDLE;
    }

  private:
    VkCommandPool m_commandPool;
  };
  static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );

  class Buffer
  {
  public:
    Buffer()
      : m_buffer(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    Buffer(VkBuffer buffer)
      : m_buffer(buffer)
    {}

    Buffer& operator=(VkBuffer buffer)
    {
      m_buffer = buffer;
      return *this;
    }
#endif

    bool operator==(Buffer const &rhs) const
    {
      return m_buffer == rhs.m_buffer;
    }

    bool operator!=(Buffer const &rhs) const
    {
      return m_buffer != rhs.m_buffer;
    }

    bool operator<(Buffer const &rhs) const
    {
      return m_buffer < rhs.m_buffer;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkBuffer() const
    {
      return m_buffer;
    }

    explicit operator bool() const
    {
      return m_buffer != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_buffer == VK_NULL_HANDLE;
    }

  private:
    VkBuffer m_buffer;
  };
  static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );

  class BufferView
  {
  public:
    BufferView()
      : m_bufferView(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    BufferView(VkBufferView bufferView)
      : m_bufferView(bufferView)
    {}

    BufferView& operator=(VkBufferView bufferView)
    {
      m_bufferView = bufferView;
      return *this;
    }
#endif

    bool operator==(BufferView const &rhs) const
    {
      return m_bufferView == rhs.m_bufferView;
    }

    bool operator!=(BufferView const &rhs) const
    {
      return m_bufferView != rhs.m_bufferView;
    }

    bool operator<(BufferView const &rhs) const
    {
      return m_bufferView < rhs.m_bufferView;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkBufferView() const
    {
      return m_bufferView;
    }

    explicit operator bool() const
    {
      return m_bufferView != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_bufferView == VK_NULL_HANDLE;
    }

  private:
    VkBufferView m_bufferView;
  };
  static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );

  class Image
  {
  public:
    Image()
      : m_image(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    Image(VkImage image)
      : m_image(image)
    {}

    Image& operator=(VkImage image)
    {
      m_image = image;
      return *this;
    }
#endif

    bool operator==(Image const &rhs) const
    {
      return m_image == rhs.m_image;
    }

    bool operator!=(Image const &rhs) const
    {
      return m_image != rhs.m_image;
    }

    bool operator<(Image const &rhs) const
    {
      return m_image < rhs.m_image;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkImage() const
    {
      return m_image;
    }

    explicit operator bool() const
    {
      return m_image != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_image == VK_NULL_HANDLE;
    }

  private:
    VkImage m_image;
  };
  static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );

  class ImageView
  {
  public:
    ImageView()
      : m_imageView(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    ImageView(VkImageView imageView)
      : m_imageView(imageView)
    {}

    ImageView& operator=(VkImageView imageView)
    {
      m_imageView = imageView;
      return *this;
    }
#endif

    bool operator==(ImageView const &rhs) const
    {
      return m_imageView == rhs.m_imageView;
    }

    bool operator!=(ImageView const &rhs) const
    {
      return m_imageView != rhs.m_imageView;
    }

    bool operator<(ImageView const &rhs) const
    {
      return m_imageView < rhs.m_imageView;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkImageView() const
    {
      return m_imageView;
    }

    explicit operator bool() const
    {
      return m_imageView != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_imageView == VK_NULL_HANDLE;
    }

  private:
    VkImageView m_imageView;
  };
  static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );

  class ShaderModule
  {
  public:
    ShaderModule()
      : m_shaderModule(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    ShaderModule(VkShaderModule shaderModule)
      : m_shaderModule(shaderModule)
    {}

    ShaderModule& operator=(VkShaderModule shaderModule)
    {
      m_shaderModule = shaderModule;
      return *this;
    }
#endif

    bool operator==(ShaderModule const &rhs) const
    {
      return m_shaderModule == rhs.m_shaderModule;
    }

    bool operator!=(ShaderModule const &rhs) const
    {
      return m_shaderModule != rhs.m_shaderModule;
    }

    bool operator<(ShaderModule const &rhs) const
    {
      return m_shaderModule < rhs.m_shaderModule;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkShaderModule() const
    {
      return m_shaderModule;
    }

    explicit operator bool() const
    {
      return m_shaderModule != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_shaderModule == VK_NULL_HANDLE;
    }

  private:
    VkShaderModule m_shaderModule;
  };
  static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );

  class Pipeline
  {
  public:
    Pipeline()
      : m_pipeline(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    Pipeline(VkPipeline pipeline)
      : m_pipeline(pipeline)
    {}

    Pipeline& operator=(VkPipeline pipeline)
    {
      m_pipeline = pipeline;
      return *this;
    }
#endif

    bool operator==(Pipeline const &rhs) const
    {
      return m_pipeline == rhs.m_pipeline;
    }

    bool operator!=(Pipeline const &rhs) const
    {
      return m_pipeline != rhs.m_pipeline;
    }

    bool operator<(Pipeline const &rhs) const
    {
      return m_pipeline < rhs.m_pipeline;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkPipeline() const
    {
      return m_pipeline;
    }

    explicit operator bool() const
    {
      return m_pipeline != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_pipeline == VK_NULL_HANDLE;
    }

  private:
    VkPipeline m_pipeline;
  };
  static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );

  class PipelineLayout
  {
  public:
    PipelineLayout()
      : m_pipelineLayout(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    PipelineLayout(VkPipelineLayout pipelineLayout)
      : m_pipelineLayout(pipelineLayout)
    {}

    PipelineLayout& operator=(VkPipelineLayout pipelineLayout)
    {
      m_pipelineLayout = pipelineLayout;
      return *this;
    }
#endif

    bool operator==(PipelineLayout const &rhs) const
    {
      return m_pipelineLayout == rhs.m_pipelineLayout;
    }

    bool operator!=(PipelineLayout const &rhs) const
    {
      return m_pipelineLayout != rhs.m_pipelineLayout;
    }

    bool operator<(PipelineLayout const &rhs) const
    {
      return m_pipelineLayout < rhs.m_pipelineLayout;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkPipelineLayout() const
    {
      return m_pipelineLayout;
    }

    explicit operator bool() const
    {
      return m_pipelineLayout != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_pipelineLayout == VK_NULL_HANDLE;
    }

  private:
    VkPipelineLayout m_pipelineLayout;
  };
  static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );

  class Sampler
  {
  public:
    Sampler()
      : m_sampler(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    Sampler(VkSampler sampler)
      : m_sampler(sampler)
    {}

    Sampler& operator=(VkSampler sampler)
    {
      m_sampler = sampler;
      return *this;
    }
#endif

    bool operator==(Sampler const &rhs) const
    {
      return m_sampler == rhs.m_sampler;
    }

    bool operator!=(Sampler const &rhs) const
    {
      return m_sampler != rhs.m_sampler;
    }

    bool operator<(Sampler const &rhs) const
    {
      return m_sampler < rhs.m_sampler;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkSampler() const
    {
      return m_sampler;
    }

    explicit operator bool() const
    {
      return m_sampler != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_sampler == VK_NULL_HANDLE;
    }

  private:
    VkSampler m_sampler;
  };
  static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );

  class DescriptorSet
  {
  public:
    DescriptorSet()
      : m_descriptorSet(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    DescriptorSet(VkDescriptorSet descriptorSet)
      : m_descriptorSet(descriptorSet)
    {}

    DescriptorSet& operator=(VkDescriptorSet descriptorSet)
    {
      m_descriptorSet = descriptorSet;
      return *this;
    }
#endif

    bool operator==(DescriptorSet const &rhs) const
    {
      return m_descriptorSet == rhs.m_descriptorSet;
    }

    bool operator!=(DescriptorSet const &rhs) const
    {
      return m_descriptorSet != rhs.m_descriptorSet;
    }

    bool operator<(DescriptorSet const &rhs) const
    {
      return m_descriptorSet < rhs.m_descriptorSet;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkDescriptorSet() const
    {
      return m_descriptorSet;
    }

    explicit operator bool() const
    {
      return m_descriptorSet != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_descriptorSet == VK_NULL_HANDLE;
    }

  private:
    VkDescriptorSet m_descriptorSet;
  };
  static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );

  class DescriptorSetLayout
  {
  public:
    DescriptorSetLayout()
      : m_descriptorSetLayout(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    DescriptorSetLayout(VkDescriptorSetLayout descriptorSetLayout)
      : m_descriptorSetLayout(descriptorSetLayout)
    {}

    DescriptorSetLayout& operator=(VkDescriptorSetLayout descriptorSetLayout)
    {
      m_descriptorSetLayout = descriptorSetLayout;
      return *this;
    }
#endif

    bool operator==(DescriptorSetLayout const &rhs) const
    {
      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
    }

    bool operator!=(DescriptorSetLayout const &rhs) const
    {
      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
    }

    bool operator<(DescriptorSetLayout const &rhs) const
    {
      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkDescriptorSetLayout() const
    {
      return m_descriptorSetLayout;
    }

    explicit operator bool() const
    {
      return m_descriptorSetLayout != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_descriptorSetLayout == VK_NULL_HANDLE;
    }

  private:
    VkDescriptorSetLayout m_descriptorSetLayout;
  };
  static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );

  class DescriptorPool
  {
  public:
    DescriptorPool()
      : m_descriptorPool(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    DescriptorPool(VkDescriptorPool descriptorPool)
      : m_descriptorPool(descriptorPool)
    {}

    DescriptorPool& operator=(VkDescriptorPool descriptorPool)
    {
      m_descriptorPool = descriptorPool;
      return *this;
    }
#endif

    bool operator==(DescriptorPool const &rhs) const
    {
      return m_descriptorPool == rhs.m_descriptorPool;
    }

    bool operator!=(DescriptorPool const &rhs) const
    {
      return m_descriptorPool != rhs.m_descriptorPool;
    }

    bool operator<(DescriptorPool const &rhs) const
    {
      return m_descriptorPool < rhs.m_descriptorPool;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkDescriptorPool() const
    {
      return m_descriptorPool;
    }

    explicit operator bool() const
    {
      return m_descriptorPool != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_descriptorPool == VK_NULL_HANDLE;
    }

  private:
    VkDescriptorPool m_descriptorPool;
  };
  static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );

  class Fence
  {
  public:
    Fence()
      : m_fence(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    Fence(VkFence fence)
      : m_fence(fence)
    {}

    Fence& operator=(VkFence fence)
    {
      m_fence = fence;
      return *this;
    }
#endif

    bool operator==(Fence const &rhs) const
    {
      return m_fence == rhs.m_fence;
    }

    bool operator!=(Fence const &rhs) const
    {
      return m_fence != rhs.m_fence;
    }

    bool operator<(Fence const &rhs) const
    {
      return m_fence < rhs.m_fence;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkFence() const
    {
      return m_fence;
    }

    explicit operator bool() const
    {
      return m_fence != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_fence == VK_NULL_HANDLE;
    }

  private:
    VkFence m_fence;
  };
  static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );

  class Semaphore
  {
  public:
    Semaphore()
      : m_semaphore(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    Semaphore(VkSemaphore semaphore)
      : m_semaphore(semaphore)
    {}

    Semaphore& operator=(VkSemaphore semaphore)
    {
      m_semaphore = semaphore;
      return *this;
    }
#endif

    bool operator==(Semaphore const &rhs) const
    {
      return m_semaphore == rhs.m_semaphore;
    }

    bool operator!=(Semaphore const &rhs) const
    {
      return m_semaphore != rhs.m_semaphore;
    }

    bool operator<(Semaphore const &rhs) const
    {
      return m_semaphore < rhs.m_semaphore;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkSemaphore() const
    {
      return m_semaphore;
    }

    explicit operator bool() const
    {
      return m_semaphore != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_semaphore == VK_NULL_HANDLE;
    }

  private:
    VkSemaphore m_semaphore;
  };
  static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );

  class Event
  {
  public:
    Event()
      : m_event(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    Event(VkEvent event)
      : m_event(event)
    {}

    Event& operator=(VkEvent event)
    {
      m_event = event;
      return *this;
    }
#endif

    bool operator==(Event const &rhs) const
    {
      return m_event == rhs.m_event;
    }

    bool operator!=(Event const &rhs) const
    {
      return m_event != rhs.m_event;
    }

    bool operator<(Event const &rhs) const
    {
      return m_event < rhs.m_event;
    }

#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
    explicit
#endif
    operator VkEvent() const
    {
      return m_event;
    }

    explicit operator bool() const
    {
      return m_event != VK_NULL_HANDLE;
    }

    bool operator!() const
    {
      return m_event == VK_NULL_HANDLE;
    }

  private:
    VkEvent m_event;
  };
  static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );

1889 class QueryPool
1890 {
1891 public:
1892 QueryPool()
1893 : m_queryPool(VK_NULL_HANDLE)
1894 {}
1895
1896#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1897 QueryPool(VkQueryPool queryPool)
1898 : m_queryPool(queryPool)
1899 {}
1900
1901 QueryPool& operator=(VkQueryPool queryPool)
1902 {
1903 m_queryPool = queryPool;
1904 return *this;
1905 }
1906#endif
1907
1908 bool operator==(QueryPool const &rhs) const
1909 {
1910 return m_queryPool == rhs.m_queryPool;
1911 }
1912
1913 bool operator!=(QueryPool const &rhs) const
1914 {
1915 return m_queryPool != rhs.m_queryPool;
1916 }
1917
1918 bool operator<(QueryPool const &rhs) const
1919 {
1920 return m_queryPool < rhs.m_queryPool;
1921 }
1922
1923#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1924 explicit
1925#endif
1926 operator VkQueryPool() const
1927 {
1928 return m_queryPool;
1929 }
1930
1931 explicit operator bool() const
1932 {
1933 return m_queryPool != VK_NULL_HANDLE;
1934 }
1935
1936 bool operator!() const
1937 {
1938 return m_queryPool == VK_NULL_HANDLE;
1939 }
1940
1941 private:
1942 VkQueryPool m_queryPool;
1943 };
1944 static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
1945
1946 class Framebuffer
1947 {
1948 public:
1949 Framebuffer()
1950 : m_framebuffer(VK_NULL_HANDLE)
1951 {}
1952
1953#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1954 Framebuffer(VkFramebuffer framebuffer)
1955 : m_framebuffer(framebuffer)
1956 {}
1957
1958 Framebuffer& operator=(VkFramebuffer framebuffer)
1959 {
1960 m_framebuffer = framebuffer;
1961 return *this;
1962 }
1963#endif
1964
1965 bool operator==(Framebuffer const &rhs) const
1966 {
1967 return m_framebuffer == rhs.m_framebuffer;
1968 }
1969
1970 bool operator!=(Framebuffer const &rhs) const
1971 {
1972 return m_framebuffer != rhs.m_framebuffer;
1973 }
1974
1975 bool operator<(Framebuffer const &rhs) const
1976 {
1977 return m_framebuffer < rhs.m_framebuffer;
1978 }
1979
1980#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1981 explicit
1982#endif
1983 operator VkFramebuffer() const
1984 {
1985 return m_framebuffer;
1986 }
1987
1988 explicit operator bool() const
1989 {
1990 return m_framebuffer != VK_NULL_HANDLE;
1991 }
1992
1993 bool operator!() const
1994 {
1995 return m_framebuffer == VK_NULL_HANDLE;
1996 }
1997
1998 private:
1999 VkFramebuffer m_framebuffer;
2000 };
2001 static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
2002
2003 class RenderPass
2004 {
2005 public:
2006 RenderPass()
2007 : m_renderPass(VK_NULL_HANDLE)
2008 {}
2009
2010#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2011 RenderPass(VkRenderPass renderPass)
2012 : m_renderPass(renderPass)
2013 {}
2014
2015 RenderPass& operator=(VkRenderPass renderPass)
2016 {
2017 m_renderPass = renderPass;
2018 return *this;
2019 }
2020#endif
2021
2022 bool operator==(RenderPass const &rhs) const
2023 {
2024 return m_renderPass == rhs.m_renderPass;
2025 }
2026
2027 bool operator!=(RenderPass const &rhs) const
2028 {
2029 return m_renderPass != rhs.m_renderPass;
2030 }
2031
2032 bool operator<(RenderPass const &rhs) const
2033 {
2034 return m_renderPass < rhs.m_renderPass;
2035 }
2036
2037#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2038 explicit
2039#endif
2040 operator VkRenderPass() const
2041 {
2042 return m_renderPass;
2043 }
2044
2045 explicit operator bool() const
2046 {
2047 return m_renderPass != VK_NULL_HANDLE;
2048 }
2049
2050 bool operator!() const
2051 {
2052 return m_renderPass == VK_NULL_HANDLE;
2053 }
2054
2055 private:
2056 VkRenderPass m_renderPass;
2057 };
2058 static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
2059
2060 class PipelineCache
2061 {
2062 public:
2063 PipelineCache()
2064 : m_pipelineCache(VK_NULL_HANDLE)
2065 {}
2066
2067#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2068 PipelineCache(VkPipelineCache pipelineCache)
2069 : m_pipelineCache(pipelineCache)
2070 {}
2071
2072 PipelineCache& operator=(VkPipelineCache pipelineCache)
2073 {
2074 m_pipelineCache = pipelineCache;
2075 return *this;
2076 }
2077#endif
2078
2079 bool operator==(PipelineCache const &rhs) const
2080 {
2081 return m_pipelineCache == rhs.m_pipelineCache;
2082 }
2083
2084 bool operator!=(PipelineCache const &rhs) const
2085 {
2086 return m_pipelineCache != rhs.m_pipelineCache;
2087 }
2088
2089 bool operator<(PipelineCache const &rhs) const
2090 {
2091 return m_pipelineCache < rhs.m_pipelineCache;
2092 }
2093
2094#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2095 explicit
2096#endif
2097 operator VkPipelineCache() const
2098 {
2099 return m_pipelineCache;
2100 }
2101
2102 explicit operator bool() const
2103 {
2104 return m_pipelineCache != VK_NULL_HANDLE;
2105 }
2106
2107 bool operator!() const
2108 {
2109 return m_pipelineCache == VK_NULL_HANDLE;
2110 }
2111
2112 private:
2113 VkPipelineCache m_pipelineCache;
2114 };
2115 static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
2116
2117 class ObjectTableNVX
2118 {
2119 public:
2120 ObjectTableNVX()
2121 : m_objectTableNVX(VK_NULL_HANDLE)
2122 {}
2123
2124#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2125 ObjectTableNVX(VkObjectTableNVX objectTableNVX)
2126 : m_objectTableNVX(objectTableNVX)
2127 {}
2128
2129 ObjectTableNVX& operator=(VkObjectTableNVX objectTableNVX)
2130 {
2131 m_objectTableNVX = objectTableNVX;
2132 return *this;
2133 }
2134#endif
2135
2136 bool operator==(ObjectTableNVX const &rhs) const
2137 {
2138 return m_objectTableNVX == rhs.m_objectTableNVX;
2139 }
2140
2141 bool operator!=(ObjectTableNVX const &rhs) const
2142 {
2143 return m_objectTableNVX != rhs.m_objectTableNVX;
2144 }
2145
2146 bool operator<(ObjectTableNVX const &rhs) const
2147 {
2148 return m_objectTableNVX < rhs.m_objectTableNVX;
2149 }
2150
2151#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2152 explicit
2153#endif
2154 operator VkObjectTableNVX() const
2155 {
2156 return m_objectTableNVX;
2157 }
2158
2159 explicit operator bool() const
2160 {
2161 return m_objectTableNVX != VK_NULL_HANDLE;
2162 }
2163
2164 bool operator!() const
2165 {
2166 return m_objectTableNVX == VK_NULL_HANDLE;
2167 }
2168
2169 private:
2170 VkObjectTableNVX m_objectTableNVX;
2171 };
2172 static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
2173
2174 class IndirectCommandsLayoutNVX
2175 {
2176 public:
2177 IndirectCommandsLayoutNVX()
2178 : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
2179 {}
2180
2181#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2182 IndirectCommandsLayoutNVX(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
2183 : m_indirectCommandsLayoutNVX(indirectCommandsLayoutNVX)
2184 {}
2185
2186 IndirectCommandsLayoutNVX& operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
2187 {
2188 m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
2189 return *this;
2190 }
2191#endif
2192
2193 bool operator==(IndirectCommandsLayoutNVX const &rhs) const
2194 {
2195 return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
2196 }
2197
2198 bool operator!=(IndirectCommandsLayoutNVX const &rhs) const
2199 {
2200 return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
2201 }
2202
2203 bool operator<(IndirectCommandsLayoutNVX const &rhs) const
2204 {
2205 return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
2206 }
2207
2208#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2209 explicit
2210#endif
2211 operator VkIndirectCommandsLayoutNVX() const
2212 {
2213 return m_indirectCommandsLayoutNVX;
2214 }
2215
2216 explicit operator bool() const
2217 {
2218 return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
2219 }
2220
2221 bool operator!() const
2222 {
2223 return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
2224 }
2225
2226 private:
2227 VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
2228 };
2229 static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
2230
2231 class DisplayKHR
2232 {
2233 public:
2234 DisplayKHR()
2235 : m_displayKHR(VK_NULL_HANDLE)
2236 {}
2237
2238#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2239 DisplayKHR(VkDisplayKHR displayKHR)
2240 : m_displayKHR(displayKHR)
2241 {}
2242
2243 DisplayKHR& operator=(VkDisplayKHR displayKHR)
2244 {
2245 m_displayKHR = displayKHR;
2246 return *this;
2247 }
2248#endif
2249
2250 bool operator==(DisplayKHR const &rhs) const
2251 {
2252 return m_displayKHR == rhs.m_displayKHR;
2253 }
2254
2255 bool operator!=(DisplayKHR const &rhs) const
2256 {
2257 return m_displayKHR != rhs.m_displayKHR;
2258 }
2259
2260 bool operator<(DisplayKHR const &rhs) const
2261 {
2262 return m_displayKHR < rhs.m_displayKHR;
2263 }
2264
2265#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2266 explicit
2267#endif
2268 operator VkDisplayKHR() const
2269 {
2270 return m_displayKHR;
2271 }
2272
2273 explicit operator bool() const
2274 {
2275 return m_displayKHR != VK_NULL_HANDLE;
2276 }
2277
2278 bool operator!() const
2279 {
2280 return m_displayKHR == VK_NULL_HANDLE;
2281 }
2282
2283 private:
2284 VkDisplayKHR m_displayKHR;
2285 };
2286 static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
2287
2288 class DisplayModeKHR
2289 {
2290 public:
2291 DisplayModeKHR()
2292 : m_displayModeKHR(VK_NULL_HANDLE)
2293 {}
2294
2295#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2296 DisplayModeKHR(VkDisplayModeKHR displayModeKHR)
2297 : m_displayModeKHR(displayModeKHR)
2298 {}
2299
2300 DisplayModeKHR& operator=(VkDisplayModeKHR displayModeKHR)
2301 {
2302 m_displayModeKHR = displayModeKHR;
2303 return *this;
2304 }
2305#endif
2306
2307 bool operator==(DisplayModeKHR const &rhs) const
2308 {
2309 return m_displayModeKHR == rhs.m_displayModeKHR;
2310 }
2311
2312 bool operator!=(DisplayModeKHR const &rhs) const
2313 {
2314 return m_displayModeKHR != rhs.m_displayModeKHR;
2315 }
2316
2317 bool operator<(DisplayModeKHR const &rhs) const
2318 {
2319 return m_displayModeKHR < rhs.m_displayModeKHR;
2320 }
2321
2322#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2323 explicit
2324#endif
2325 operator VkDisplayModeKHR() const
2326 {
2327 return m_displayModeKHR;
2328 }
2329
2330 explicit operator bool() const
2331 {
2332 return m_displayModeKHR != VK_NULL_HANDLE;
2333 }
2334
2335 bool operator!() const
2336 {
2337 return m_displayModeKHR == VK_NULL_HANDLE;
2338 }
2339
2340 private:
2341 VkDisplayModeKHR m_displayModeKHR;
2342 };
2343 static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
2344
2345 class SurfaceKHR
2346 {
2347 public:
2348 SurfaceKHR()
2349 : m_surfaceKHR(VK_NULL_HANDLE)
2350 {}
2351
2352#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2353 SurfaceKHR(VkSurfaceKHR surfaceKHR)
2354 : m_surfaceKHR(surfaceKHR)
2355 {}
2356
2357 SurfaceKHR& operator=(VkSurfaceKHR surfaceKHR)
2358 {
2359 m_surfaceKHR = surfaceKHR;
2360 return *this;
2361 }
2362#endif
2363
2364 bool operator==(SurfaceKHR const &rhs) const
2365 {
2366 return m_surfaceKHR == rhs.m_surfaceKHR;
2367 }
2368
2369 bool operator!=(SurfaceKHR const &rhs) const
2370 {
2371 return m_surfaceKHR != rhs.m_surfaceKHR;
2372 }
2373
2374 bool operator<(SurfaceKHR const &rhs) const
2375 {
2376 return m_surfaceKHR < rhs.m_surfaceKHR;
2377 }
2378
2379#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2380 explicit
2381#endif
2382 operator VkSurfaceKHR() const
2383 {
2384 return m_surfaceKHR;
2385 }
2386
2387 explicit operator bool() const
2388 {
2389 return m_surfaceKHR != VK_NULL_HANDLE;
2390 }
2391
2392 bool operator!() const
2393 {
2394 return m_surfaceKHR == VK_NULL_HANDLE;
2395 }
2396
2397 private:
2398 VkSurfaceKHR m_surfaceKHR;
2399 };
2400 static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
2401
2402 class SwapchainKHR
2403 {
2404 public:
2405 SwapchainKHR()
2406 : m_swapchainKHR(VK_NULL_HANDLE)
2407 {}
2408
2409#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2410 SwapchainKHR(VkSwapchainKHR swapchainKHR)
2411 : m_swapchainKHR(swapchainKHR)
2412 {}
2413
2414 SwapchainKHR& operator=(VkSwapchainKHR swapchainKHR)
2415 {
2416 m_swapchainKHR = swapchainKHR;
2417 return *this;
2418 }
2419#endif
2420
2421 bool operator==(SwapchainKHR const &rhs) const
2422 {
2423 return m_swapchainKHR == rhs.m_swapchainKHR;
2424 }
2425
2426 bool operator!=(SwapchainKHR const &rhs) const
2427 {
2428 return m_swapchainKHR != rhs.m_swapchainKHR;
2429 }
2430
2431 bool operator<(SwapchainKHR const &rhs) const
2432 {
2433 return m_swapchainKHR < rhs.m_swapchainKHR;
2434 }
2435
2436#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2437 explicit
2438#endif
2439 operator VkSwapchainKHR() const
2440 {
2441 return m_swapchainKHR;
2442 }
2443
2444 explicit operator bool() const
2445 {
2446 return m_swapchainKHR != VK_NULL_HANDLE;
2447 }
2448
2449 bool operator!() const
2450 {
2451 return m_swapchainKHR == VK_NULL_HANDLE;
2452 }
2453
2454 private:
2455 VkSwapchainKHR m_swapchainKHR;
2456 };
2457 static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
2458
2459 class DebugReportCallbackEXT
2460 {
2461 public:
2462 DebugReportCallbackEXT()
2463 : m_debugReportCallbackEXT(VK_NULL_HANDLE)
2464 {}
2465
2466#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2467 DebugReportCallbackEXT(VkDebugReportCallbackEXT debugReportCallbackEXT)
2468 : m_debugReportCallbackEXT(debugReportCallbackEXT)
2469 {}
2470
2471 DebugReportCallbackEXT& operator=(VkDebugReportCallbackEXT debugReportCallbackEXT)
2472 {
2473 m_debugReportCallbackEXT = debugReportCallbackEXT;
2474 return *this;
2475 }
2476#endif
2477
2478 bool operator==(DebugReportCallbackEXT const &rhs) const
2479 {
2480 return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
2481 }
2482
2483 bool operator!=(DebugReportCallbackEXT const &rhs) const
2484 {
2485 return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
2486 }
2487
2488 bool operator<(DebugReportCallbackEXT const &rhs) const
2489 {
2490 return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
2491 }
2492
2493#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2494 explicit
2495#endif
2496 operator VkDebugReportCallbackEXT() const
2497 {
2498 return m_debugReportCallbackEXT;
2499 }
2500
2501 explicit operator bool() const
2502 {
2503 return m_debugReportCallbackEXT != VK_NULL_HANDLE;
2504 }
2505
2506 bool operator!() const
2507 {
2508 return m_debugReportCallbackEXT == VK_NULL_HANDLE;
2509 }
2510
2511 private:
2512 VkDebugReportCallbackEXT m_debugReportCallbackEXT;
2513 };
2514 static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
2515
2516 struct Offset2D
2517 {
2518 Offset2D( int32_t x_ = 0, int32_t y_ = 0 )
2519 : x( x_ )
2520 , y( y_ )
2521 {
2522 }
2523
2524 Offset2D( VkOffset2D const & rhs )
2525 {
2526 memcpy( this, &rhs, sizeof(Offset2D) );
2527 }
2528
2529 Offset2D& operator=( VkOffset2D const & rhs )
2530 {
2531 memcpy( this, &rhs, sizeof(Offset2D) );
2532 return *this;
2533 }
2534
2535 Offset2D& setX( int32_t x_ )
2536 {
2537 x = x_;
2538 return *this;
2539 }
2540
2541 Offset2D& setY( int32_t y_ )
2542 {
2543 y = y_;
2544 return *this;
2545 }
2546
2547 operator const VkOffset2D&() const
2548 {
2549 return *reinterpret_cast<const VkOffset2D*>(this);
2550 }
2551
2552 bool operator==( Offset2D const& rhs ) const
2553 {
2554 return ( x == rhs.x )
2555 && ( y == rhs.y );
2556 }
2557
2558 bool operator!=( Offset2D const& rhs ) const
2559 {
2560 return !operator==( rhs );
2561 }
2562
2563 int32_t x;
2564 int32_t y;
2565 };
2566 static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
2567
2568 struct Offset3D
2569 {
2570 Offset3D( int32_t x_ = 0, int32_t y_ = 0, int32_t z_ = 0 )
2571 : x( x_ )
2572 , y( y_ )
2573 , z( z_ )
2574 {
2575 }
2576
2577 Offset3D( VkOffset3D const & rhs )
2578 {
2579 memcpy( this, &rhs, sizeof(Offset3D) );
2580 }
2581
2582 Offset3D& operator=( VkOffset3D const & rhs )
2583 {
2584 memcpy( this, &rhs, sizeof(Offset3D) );
2585 return *this;
2586 }
2587
2588 Offset3D& setX( int32_t x_ )
2589 {
2590 x = x_;
2591 return *this;
2592 }
2593
2594 Offset3D& setY( int32_t y_ )
2595 {
2596 y = y_;
2597 return *this;
2598 }
2599
2600 Offset3D& setZ( int32_t z_ )
2601 {
2602 z = z_;
2603 return *this;
2604 }
2605
2606 operator const VkOffset3D&() const
2607 {
2608 return *reinterpret_cast<const VkOffset3D*>(this);
2609 }
2610
2611 bool operator==( Offset3D const& rhs ) const
2612 {
2613 return ( x == rhs.x )
2614 && ( y == rhs.y )
2615 && ( z == rhs.z );
2616 }
2617
2618 bool operator!=( Offset3D const& rhs ) const
2619 {
2620 return !operator==( rhs );
2621 }
2622
2623 int32_t x;
2624 int32_t y;
2625 int32_t z;
2626 };
2627 static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
2628
2629 struct Extent2D
2630 {
2631 Extent2D( uint32_t width_ = 0, uint32_t height_ = 0 )
2632 : width( width_ )
2633 , height( height_ )
2634 {
2635 }
2636
2637 Extent2D( VkExtent2D const & rhs )
2638 {
2639 memcpy( this, &rhs, sizeof(Extent2D) );
2640 }
2641
2642 Extent2D& operator=( VkExtent2D const & rhs )
2643 {
2644 memcpy( this, &rhs, sizeof(Extent2D) );
2645 return *this;
2646 }
2647
2648 Extent2D& setWidth( uint32_t width_ )
2649 {
2650 width = width_;
2651 return *this;
2652 }
2653
2654 Extent2D& setHeight( uint32_t height_ )
2655 {
2656 height = height_;
2657 return *this;
2658 }
2659
2660 operator const VkExtent2D&() const
2661 {
2662 return *reinterpret_cast<const VkExtent2D*>(this);
2663 }
2664
2665 bool operator==( Extent2D const& rhs ) const
2666 {
2667 return ( width == rhs.width )
2668 && ( height == rhs.height );
2669 }
2670
2671 bool operator!=( Extent2D const& rhs ) const
2672 {
2673 return !operator==( rhs );
2674 }
2675
2676 uint32_t width;
2677 uint32_t height;
2678 };
2679 static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
2680
2681 struct Extent3D
2682 {
2683 Extent3D( uint32_t width_ = 0, uint32_t height_ = 0, uint32_t depth_ = 0 )
2684 : width( width_ )
2685 , height( height_ )
2686 , depth( depth_ )
2687 {
2688 }
2689
2690 Extent3D( VkExtent3D const & rhs )
2691 {
2692 memcpy( this, &rhs, sizeof(Extent3D) );
2693 }
2694
2695 Extent3D& operator=( VkExtent3D const & rhs )
2696 {
2697 memcpy( this, &rhs, sizeof(Extent3D) );
2698 return *this;
2699 }
2700
2701 Extent3D& setWidth( uint32_t width_ )
2702 {
2703 width = width_;
2704 return *this;
2705 }
2706
2707 Extent3D& setHeight( uint32_t height_ )
2708 {
2709 height = height_;
2710 return *this;
2711 }
2712
2713 Extent3D& setDepth( uint32_t depth_ )
2714 {
2715 depth = depth_;
2716 return *this;
2717 }
2718
2719 operator const VkExtent3D&() const
2720 {
2721 return *reinterpret_cast<const VkExtent3D*>(this);
2722 }
2723
2724 bool operator==( Extent3D const& rhs ) const
2725 {
2726 return ( width == rhs.width )
2727 && ( height == rhs.height )
2728 && ( depth == rhs.depth );
2729 }
2730
2731 bool operator!=( Extent3D const& rhs ) const
2732 {
2733 return !operator==( rhs );
2734 }
2735
2736 uint32_t width;
2737 uint32_t height;
2738 uint32_t depth;
2739 };
2740 static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
2741
2742 struct Viewport
2743 {
2744 Viewport( float x_ = 0, float y_ = 0, float width_ = 0, float height_ = 0, float minDepth_ = 0, float maxDepth_ = 0 )
2745 : x( x_ )
2746 , y( y_ )
2747 , width( width_ )
2748 , height( height_ )
2749 , minDepth( minDepth_ )
2750 , maxDepth( maxDepth_ )
2751 {
2752 }
2753
2754 Viewport( VkViewport const & rhs )
2755 {
2756 memcpy( this, &rhs, sizeof(Viewport) );
2757 }
2758
2759 Viewport& operator=( VkViewport const & rhs )
2760 {
2761 memcpy( this, &rhs, sizeof(Viewport) );
2762 return *this;
2763 }
2764
2765 Viewport& setX( float x_ )
2766 {
2767 x = x_;
2768 return *this;
2769 }
2770
2771 Viewport& setY( float y_ )
2772 {
2773 y = y_;
2774 return *this;
2775 }
2776
2777 Viewport& setWidth( float width_ )
2778 {
2779 width = width_;
2780 return *this;
2781 }
2782
2783 Viewport& setHeight( float height_ )
2784 {
2785 height = height_;
2786 return *this;
2787 }
2788
2789 Viewport& setMinDepth( float minDepth_ )
2790 {
2791 minDepth = minDepth_;
2792 return *this;
2793 }
2794
2795 Viewport& setMaxDepth( float maxDepth_ )
2796 {
2797 maxDepth = maxDepth_;
2798 return *this;
2799 }
2800
2801 operator const VkViewport&() const
2802 {
2803 return *reinterpret_cast<const VkViewport*>(this);
2804 }
2805
2806 bool operator==( Viewport const& rhs ) const
2807 {
2808 return ( x == rhs.x )
2809 && ( y == rhs.y )
2810 && ( width == rhs.width )
2811 && ( height == rhs.height )
2812 && ( minDepth == rhs.minDepth )
2813 && ( maxDepth == rhs.maxDepth );
2814 }
2815
2816 bool operator!=( Viewport const& rhs ) const
2817 {
2818 return !operator==( rhs );
2819 }
2820
2821 float x;
2822 float y;
2823 float width;
2824 float height;
2825 float minDepth;
2826 float maxDepth;
2827 };
2828 static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
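  // Usage sketch (illustrative only, with hypothetical 640x480 dimensions): the struct
  // wrappers expose chainable setters, so a full-window viewport can be written as
  //
  //   vk::Viewport viewport = vk::Viewport()
  //     .setX( 0.0f ).setY( 0.0f )
  //     .setWidth( 640.0f ).setHeight( 480.0f )
  //     .setMinDepth( 0.0f ).setMaxDepth( 1.0f );
  //
  //   const VkViewport & native = viewport;   // implicit conversion to the C struct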
2829
2830 struct Rect2D
2831 {
2832 Rect2D( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D() )
2833 : offset( offset_ )
2834 , extent( extent_ )
2835 {
2836 }
2837
2838 Rect2D( VkRect2D const & rhs )
2839 {
2840 memcpy( this, &rhs, sizeof(Rect2D) );
2841 }
2842
2843 Rect2D& operator=( VkRect2D const & rhs )
2844 {
2845 memcpy( this, &rhs, sizeof(Rect2D) );
2846 return *this;
2847 }
2848
2849 Rect2D& setOffset( Offset2D offset_ )
2850 {
2851 offset = offset_;
2852 return *this;
2853 }
2854
2855 Rect2D& setExtent( Extent2D extent_ )
2856 {
2857 extent = extent_;
2858 return *this;
2859 }
2860
2861 operator const VkRect2D&() const
2862 {
2863 return *reinterpret_cast<const VkRect2D*>(this);
2864 }
2865
2866 bool operator==( Rect2D const& rhs ) const
2867 {
2868 return ( offset == rhs.offset )
2869 && ( extent == rhs.extent );
2870 }
2871
2872 bool operator!=( Rect2D const& rhs ) const
2873 {
2874 return !operator==( rhs );
2875 }
2876
2877 Offset2D offset;
2878 Extent2D extent;
2879 };
2880 static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
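  // Usage sketch (illustrative only, with hypothetical 640x480 dimensions): Rect2D
  // composes the Offset2D and Extent2D wrappers above, e.g. for a full-window scissor
  // rectangle.
  //
  //   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 640, 480 ) );
  //   assert( scissor == vk::Rect2D().setOffset( { 0, 0 } ).setExtent( { 640, 480 } ) );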
2881
2882 struct ClearRect
2883 {
2884 ClearRect( Rect2D rect_ = Rect2D(), uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
2885 : rect( rect_ )
2886 , baseArrayLayer( baseArrayLayer_ )
2887 , layerCount( layerCount_ )
2888 {
2889 }
2890
2891 ClearRect( VkClearRect const & rhs )
2892 {
2893 memcpy( this, &rhs, sizeof(ClearRect) );
2894 }
2895
2896 ClearRect& operator=( VkClearRect const & rhs )
2897 {
2898 memcpy( this, &rhs, sizeof(ClearRect) );
2899 return *this;
2900 }
2901
2902 ClearRect& setRect( Rect2D rect_ )
2903 {
2904 rect = rect_;
2905 return *this;
2906 }
2907
2908 ClearRect& setBaseArrayLayer( uint32_t baseArrayLayer_ )
2909 {
2910 baseArrayLayer = baseArrayLayer_;
2911 return *this;
2912 }
2913
2914 ClearRect& setLayerCount( uint32_t layerCount_ )
2915 {
2916 layerCount = layerCount_;
2917 return *this;
2918 }
2919
2920 operator const VkClearRect&() const
2921 {
2922 return *reinterpret_cast<const VkClearRect*>(this);
2923 }
2924
2925 bool operator==( ClearRect const& rhs ) const
2926 {
2927 return ( rect == rhs.rect )
2928 && ( baseArrayLayer == rhs.baseArrayLayer )
2929 && ( layerCount == rhs.layerCount );
2930 }
2931
2932 bool operator!=( ClearRect const& rhs ) const
2933 {
2934 return !operator==( rhs );
2935 }
2936
2937 Rect2D rect;
2938 uint32_t baseArrayLayer;
2939 uint32_t layerCount;
2940 };
2941 static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
2942
2943 struct ExtensionProperties
2944 {
2945 operator const VkExtensionProperties&() const
2946 {
2947 return *reinterpret_cast<const VkExtensionProperties*>(this);
2948 }
2949
2950 bool operator==( ExtensionProperties const& rhs ) const
2951 {
2952 return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
2953 && ( specVersion == rhs.specVersion );
2954 }
2955
2956 bool operator!=( ExtensionProperties const& rhs ) const
2957 {
2958 return !operator==( rhs );
2959 }
2960
2961 char extensionName[VK_MAX_EXTENSION_NAME_SIZE];
2962 uint32_t specVersion;
2963 };
2964 static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
2965
2966 struct LayerProperties
2967 {
2968 operator const VkLayerProperties&() const
2969 {
2970 return *reinterpret_cast<const VkLayerProperties*>(this);
2971 }
2972
2973 bool operator==( LayerProperties const& rhs ) const
2974 {
2975 return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
2976 && ( specVersion == rhs.specVersion )
2977 && ( implementationVersion == rhs.implementationVersion )
2978 && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
2979 }
2980
2981 bool operator!=( LayerProperties const& rhs ) const
2982 {
2983 return !operator==( rhs );
2984 }
2985
2986 char layerName[VK_MAX_EXTENSION_NAME_SIZE];
2987 uint32_t specVersion;
2988 uint32_t implementationVersion;
2989 char description[VK_MAX_DESCRIPTION_SIZE];
2990 };
2991 static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
2992
2993 struct AllocationCallbacks
2994 {
2995 AllocationCallbacks( void* pUserData_ = nullptr, PFN_vkAllocationFunction pfnAllocation_ = nullptr, PFN_vkReallocationFunction pfnReallocation_ = nullptr, PFN_vkFreeFunction pfnFree_ = nullptr, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr, PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr )
2996 : pUserData( pUserData_ )
2997 , pfnAllocation( pfnAllocation_ )
2998 , pfnReallocation( pfnReallocation_ )
2999 , pfnFree( pfnFree_ )
3000 , pfnInternalAllocation( pfnInternalAllocation_ )
3001 , pfnInternalFree( pfnInternalFree_ )
3002 {
3003 }
3004
3005 AllocationCallbacks( VkAllocationCallbacks const & rhs )
3006 {
3007 memcpy( this, &rhs, sizeof(AllocationCallbacks) );
3008 }
3009
3010 AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs )
3011 {
3012 memcpy( this, &rhs, sizeof(AllocationCallbacks) );
3013 return *this;
3014 }
3015
3016 AllocationCallbacks& setPUserData( void* pUserData_ )
3017 {
3018 pUserData = pUserData_;
3019 return *this;
3020 }
3021
3022 AllocationCallbacks& setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ )
3023 {
3024 pfnAllocation = pfnAllocation_;
3025 return *this;
3026 }
3027
3028 AllocationCallbacks& setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ )
3029 {
3030 pfnReallocation = pfnReallocation_;
3031 return *this;
3032 }
3033
3034 AllocationCallbacks& setPfnFree( PFN_vkFreeFunction pfnFree_ )
3035 {
3036 pfnFree = pfnFree_;
3037 return *this;
3038 }
3039
3040 AllocationCallbacks& setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ )
3041 {
3042 pfnInternalAllocation = pfnInternalAllocation_;
3043 return *this;
3044 }
3045
3046 AllocationCallbacks& setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ )
3047 {
3048 pfnInternalFree = pfnInternalFree_;
3049 return *this;
3050 }
3051
3052 operator const VkAllocationCallbacks&() const
3053 {
3054 return *reinterpret_cast<const VkAllocationCallbacks*>(this);
3055 }
3056
3057 bool operator==( AllocationCallbacks const& rhs ) const
3058 {
3059 return ( pUserData == rhs.pUserData )
3060 && ( pfnAllocation == rhs.pfnAllocation )
3061 && ( pfnReallocation == rhs.pfnReallocation )
3062 && ( pfnFree == rhs.pfnFree )
3063 && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
3064 && ( pfnInternalFree == rhs.pfnInternalFree );
3065 }
3066
3067 bool operator!=( AllocationCallbacks const& rhs ) const
3068 {
3069 return !operator==( rhs );
3070 }
3071
3072 void* pUserData;
3073 PFN_vkAllocationFunction pfnAllocation;
3074 PFN_vkReallocationFunction pfnReallocation;
3075 PFN_vkFreeFunction pfnFree;
3076 PFN_vkInternalAllocationNotification pfnInternalAllocation;
3077 PFN_vkInternalFreeNotification pfnInternalFree;
3078 };
3079 static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
3080
3081 struct MemoryRequirements
3082 {
3083 operator const VkMemoryRequirements&() const
3084 {
3085 return *reinterpret_cast<const VkMemoryRequirements*>(this);
3086 }
3087
3088 bool operator==( MemoryRequirements const& rhs ) const
3089 {
3090 return ( size == rhs.size )
3091 && ( alignment == rhs.alignment )
3092 && ( memoryTypeBits == rhs.memoryTypeBits );
3093 }
3094
3095 bool operator!=( MemoryRequirements const& rhs ) const
3096 {
3097 return !operator==( rhs );
3098 }
3099
3100 DeviceSize size;
3101 DeviceSize alignment;
3102 uint32_t memoryTypeBits;
3103 };
3104 static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
3105
3106 struct DescriptorBufferInfo
3107 {
3108 DescriptorBufferInfo( Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize range_ = 0 )
3109 : buffer( buffer_ )
3110 , offset( offset_ )
3111 , range( range_ )
3112 {
3113 }
3114
3115 DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs )
3116 {
3117 memcpy( this, &rhs, sizeof(DescriptorBufferInfo) );
3118 }
3119
3120 DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs )
3121 {
3122 memcpy( this, &rhs, sizeof(DescriptorBufferInfo) );
3123 return *this;
3124 }
3125
3126 DescriptorBufferInfo& setBuffer( Buffer buffer_ )
3127 {
3128 buffer = buffer_;
3129 return *this;
3130 }
3131
3132 DescriptorBufferInfo& setOffset( DeviceSize offset_ )
3133 {
3134 offset = offset_;
3135 return *this;
3136 }
3137
3138 DescriptorBufferInfo& setRange( DeviceSize range_ )
3139 {
3140 range = range_;
3141 return *this;
3142 }
3143
3144 operator const VkDescriptorBufferInfo&() const
3145 {
3146 return *reinterpret_cast<const VkDescriptorBufferInfo*>(this);
3147 }
3148
3149 bool operator==( DescriptorBufferInfo const& rhs ) const
3150 {
3151 return ( buffer == rhs.buffer )
3152 && ( offset == rhs.offset )
3153 && ( range == rhs.range );
3154 }
3155
3156 bool operator!=( DescriptorBufferInfo const& rhs ) const
3157 {
3158 return !operator==( rhs );
3159 }
3160
3161 Buffer buffer;
3162 DeviceSize offset;
3163 DeviceSize range;
3164 };
3165 static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
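  // Usage sketch (illustrative only, using a hypothetical uniform buffer handle):
  // DescriptorBufferInfo names the slice of a Buffer that a descriptor should
  // reference, here the first 256 bytes.
  //
  //   void describeUniform( vk::Buffer uniformBuffer )   // hypothetical handle
  //   {
  //     vk::DescriptorBufferInfo info( uniformBuffer, 0, 256 );
  //     // info can then be referenced from a descriptor-set write.
  //   }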
3166
3167 struct SubresourceLayout
3168 {
3169 operator const VkSubresourceLayout&() const
3170 {
3171 return *reinterpret_cast<const VkSubresourceLayout*>(this);
3172 }
3173
3174 bool operator==( SubresourceLayout const& rhs ) const
3175 {
3176 return ( offset == rhs.offset )
3177 && ( size == rhs.size )
3178 && ( rowPitch == rhs.rowPitch )
3179 && ( arrayPitch == rhs.arrayPitch )
3180 && ( depthPitch == rhs.depthPitch );
3181 }
3182
3183 bool operator!=( SubresourceLayout const& rhs ) const
3184 {
3185 return !operator==( rhs );
3186 }
3187
3188 DeviceSize offset;
3189 DeviceSize size;
3190 DeviceSize rowPitch;
3191 DeviceSize arrayPitch;
3192 DeviceSize depthPitch;
3193 };
3194 static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
3195
3196 struct BufferCopy
3197 {
3198 BufferCopy( DeviceSize srcOffset_ = 0, DeviceSize dstOffset_ = 0, DeviceSize size_ = 0 )
3199 : srcOffset( srcOffset_ )
3200 , dstOffset( dstOffset_ )
3201 , size( size_ )
3202 {
3203 }
3204
3205 BufferCopy( VkBufferCopy const & rhs )
3206 {
3207 memcpy( this, &rhs, sizeof(BufferCopy) );
3208 }
3209
3210 BufferCopy& operator=( VkBufferCopy const & rhs )
3211 {
3212 memcpy( this, &rhs, sizeof(BufferCopy) );
3213 return *this;
3214 }
3215
3216 BufferCopy& setSrcOffset( DeviceSize srcOffset_ )
3217 {
3218 srcOffset = srcOffset_;
3219 return *this;
3220 }
3221
3222 BufferCopy& setDstOffset( DeviceSize dstOffset_ )
3223 {
3224 dstOffset = dstOffset_;
3225 return *this;
3226 }
3227
3228 BufferCopy& setSize( DeviceSize size_ )
3229 {
3230 size = size_;
3231 return *this;
3232 }
3233
3234 operator const VkBufferCopy&() const
3235 {
3236 return *reinterpret_cast<const VkBufferCopy*>(this);
3237 }
3238
3239 bool operator==( BufferCopy const& rhs ) const
3240 {
3241 return ( srcOffset == rhs.srcOffset )
3242 && ( dstOffset == rhs.dstOffset )
3243 && ( size == rhs.size );
3244 }
3245
3246 bool operator!=( BufferCopy const& rhs ) const
3247 {
3248 return !operator==( rhs );
3249 }
3250
3251 DeviceSize srcOffset;
3252 DeviceSize dstOffset;
3253 DeviceSize size;
3254 };
3255 static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
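  // Usage sketch (illustrative only, with hypothetical offsets and size): a BufferCopy
  // region copying 4096 bytes from the start of a source buffer to offset 4096 of a
  // destination buffer, as would be passed to a buffer-to-buffer copy command.
  //
  //   vk::BufferCopy region = vk::BufferCopy()
  //     .setSrcOffset( 0 )
  //     .setDstOffset( 4096 )
  //     .setSize( 4096 );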
3256
3257 struct SpecializationMapEntry
3258 {
3259 SpecializationMapEntry( uint32_t constantID_ = 0, uint32_t offset_ = 0, size_t size_ = 0 )
3260 : constantID( constantID_ )
3261 , offset( offset_ )
3262 , size( size_ )
3263 {
3264 }
3265
3266 SpecializationMapEntry( VkSpecializationMapEntry const & rhs )
3267 {
3268 memcpy( this, &rhs, sizeof(SpecializationMapEntry) );
3269 }
3270
3271 SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs )
3272 {
3273 memcpy( this, &rhs, sizeof(SpecializationMapEntry) );
3274 return *this;
3275 }
3276
3277 SpecializationMapEntry& setConstantID( uint32_t constantID_ )
3278 {
3279 constantID = constantID_;
3280 return *this;
3281 }
3282
3283 SpecializationMapEntry& setOffset( uint32_t offset_ )
3284 {
3285 offset = offset_;
3286 return *this;
3287 }
3288
3289 SpecializationMapEntry& setSize( size_t size_ )
3290 {
3291 size = size_;
3292 return *this;
3293 }
3294
3295 operator const VkSpecializationMapEntry&() const
3296 {
3297 return *reinterpret_cast<const VkSpecializationMapEntry*>(this);
3298 }
3299
3300 bool operator==( SpecializationMapEntry const& rhs ) const
3301 {
3302 return ( constantID == rhs.constantID )
3303 && ( offset == rhs.offset )
3304 && ( size == rhs.size );
3305 }
3306
3307 bool operator!=( SpecializationMapEntry const& rhs ) const
3308 {
3309 return !operator==( rhs );
3310 }
3311
3312 uint32_t constantID;
3313 uint32_t offset;
3314 size_t size;
3315 };
3316 static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
3317
3318 struct SpecializationInfo
3319 {
3320 SpecializationInfo( uint32_t mapEntryCount_ = 0, const SpecializationMapEntry* pMapEntries_ = nullptr, size_t dataSize_ = 0, const void* pData_ = nullptr )
3321 : mapEntryCount( mapEntryCount_ )
3322 , pMapEntries( pMapEntries_ )
3323 , dataSize( dataSize_ )
3324 , pData( pData_ )
3325 {
3326 }
3327
3328 SpecializationInfo( VkSpecializationInfo const & rhs )
3329 {
3330 memcpy( this, &rhs, sizeof(SpecializationInfo) );
3331 }
3332
3333 SpecializationInfo& operator=( VkSpecializationInfo const & rhs )
3334 {
3335 memcpy( this, &rhs, sizeof(SpecializationInfo) );
3336 return *this;
3337 }
3338
3339 SpecializationInfo& setMapEntryCount( uint32_t mapEntryCount_ )
3340 {
3341 mapEntryCount = mapEntryCount_;
3342 return *this;
3343 }
3344
3345 SpecializationInfo& setPMapEntries( const SpecializationMapEntry* pMapEntries_ )
3346 {
3347 pMapEntries = pMapEntries_;
3348 return *this;
3349 }
3350
3351 SpecializationInfo& setDataSize( size_t dataSize_ )
3352 {
3353 dataSize = dataSize_;
3354 return *this;
3355 }
3356
3357 SpecializationInfo& setPData( const void* pData_ )
3358 {
3359 pData = pData_;
3360 return *this;
3361 }
3362
3363 operator const VkSpecializationInfo&() const
3364 {
3365 return *reinterpret_cast<const VkSpecializationInfo*>(this);
3366 }
3367
3368 bool operator==( SpecializationInfo const& rhs ) const
3369 {
3370 return ( mapEntryCount == rhs.mapEntryCount )
3371 && ( pMapEntries == rhs.pMapEntries )
3372 && ( dataSize == rhs.dataSize )
3373 && ( pData == rhs.pData );
3374 }
3375
3376 bool operator!=( SpecializationInfo const& rhs ) const
3377 {
3378 return !operator==( rhs );
3379 }
3380
3381 uint32_t mapEntryCount;
3382 const SpecializationMapEntry* pMapEntries;
3383 size_t dataSize;
3384 const void* pData;
3385 };
3386 static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
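  // Usage sketch (illustrative only): SpecializationInfo points at an array of
  // SpecializationMapEntry records plus the raw constant data they index into,
  // e.g. a single 4-byte specialization constant with constantID 0 (the name
  // workgroupSize below is hypothetical).
  //
  //   const uint32_t workgroupSize = 64;
  //   vk::SpecializationMapEntry entry( 0, 0, sizeof( uint32_t ) );
  //   vk::SpecializationInfo specInfo( 1, &entry, sizeof( workgroupSize ), &workgroupSize );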
3387
3388 union ClearColorValue
3389 {
3390 ClearColorValue( const std::array<float,4>& float32_ = { {0} } )
3391 {
3392 memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
3393 }
3394
3395 ClearColorValue( const std::array<int32_t,4>& int32_ )
3396 {
3397 memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
3398 }
3399
3400 ClearColorValue( const std::array<uint32_t,4>& uint32_ )
3401 {
3402 memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
3403 }
3404
3405 ClearColorValue& setFloat32( std::array<float,4> float32_ )
3406 {
3407 memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
3408 return *this;
3409 }
3410
3411 ClearColorValue& setInt32( std::array<int32_t,4> int32_ )
3412 {
3413 memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
3414 return *this;
3415 }
3416
3417 ClearColorValue& setUint32( std::array<uint32_t,4> uint32_ )
3418 {
3419 memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
3420 return *this;
3421 }
3422
3423 operator VkClearColorValue const& () const
3424 {
3425 return *reinterpret_cast<const VkClearColorValue*>(this);
3426 }
3427
3428 float float32[4];
3429 int32_t int32[4];
3430 uint32_t uint32[4];
3431 };
3432
3433 struct ClearDepthStencilValue
3434 {
3435 ClearDepthStencilValue( float depth_ = 0, uint32_t stencil_ = 0 )
3436 : depth( depth_ )
3437 , stencil( stencil_ )
3438 {
3439 }
3440
3441 ClearDepthStencilValue( VkClearDepthStencilValue const & rhs )
3442 {
3443 memcpy( this, &rhs, sizeof(ClearDepthStencilValue) );
3444 }
3445
3446 ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs )
3447 {
3448 memcpy( this, &rhs, sizeof(ClearDepthStencilValue) );
3449 return *this;
3450 }
3451
3452 ClearDepthStencilValue& setDepth( float depth_ )
3453 {
3454 depth = depth_;
3455 return *this;
3456 }
3457
3458 ClearDepthStencilValue& setStencil( uint32_t stencil_ )
3459 {
3460 stencil = stencil_;
3461 return *this;
3462 }
3463
3464 operator const VkClearDepthStencilValue&() const
3465 {
3466 return *reinterpret_cast<const VkClearDepthStencilValue*>(this);
3467 }
3468
3469 bool operator==( ClearDepthStencilValue const& rhs ) const
3470 {
3471 return ( depth == rhs.depth )
3472 && ( stencil == rhs.stencil );
3473 }
3474
3475 bool operator!=( ClearDepthStencilValue const& rhs ) const
3476 {
3477 return !operator==( rhs );
3478 }
3479
3480 float depth;
3481 uint32_t stencil;
3482 };
3483 static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
3484
3485 union ClearValue
3486 {
3487 ClearValue( ClearColorValue color_ = ClearColorValue() )
3488 {
3489 color = color_;
3490 }
3491
3492 ClearValue( ClearDepthStencilValue depthStencil_ )
3493 {
3494 depthStencil = depthStencil_;
3495 }
3496
3497 ClearValue& setColor( ClearColorValue color_ )
3498 {
3499 color = color_;
3500 return *this;
3501 }
3502
3503 ClearValue& setDepthStencil( ClearDepthStencilValue depthStencil_ )
3504 {
3505 depthStencil = depthStencil_;
3506 return *this;
3507 }
3508
3509 operator VkClearValue const& () const
3510 {
3511 return *reinterpret_cast<const VkClearValue*>(this);
3512 }
3513
3514#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
3515 ClearColorValue color;
3516 ClearDepthStencilValue depthStencil;
3517#else
3518 VkClearColorValue color;
3519 VkClearDepthStencilValue depthStencil;
3520#endif // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
3521 };
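  // Usage sketch (illustrative only): ClearValue carries either a color or a
  // depth/stencil clear, matching the attachment it is applied to.
  //
  //   vk::ClearValue colorClear( vk::ClearColorValue( std::array<float,4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  //   vk::ClearValue depthClear( vk::ClearDepthStencilValue( 1.0f, 0 ) );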
3522
3523 struct PhysicalDeviceFeatures
3524 {
3525 PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = 0, Bool32 fullDrawIndexUint32_ = 0, Bool32 imageCubeArray_ = 0, Bool32 independentBlend_ = 0, Bool32 geometryShader_ = 0, Bool32 tessellationShader_ = 0, Bool32 sampleRateShading_ = 0, Bool32 dualSrcBlend_ = 0, Bool32 logicOp_ = 0, Bool32 multiDrawIndirect_ = 0, Bool32 drawIndirectFirstInstance_ = 0, Bool32 depthClamp_ = 0, Bool32 depthBiasClamp_ = 0, Bool32 fillModeNonSolid_ = 0, Bool32 depthBounds_ = 0, Bool32 wideLines_ = 0, Bool32 largePoints_ = 0, Bool32 alphaToOne_ = 0, Bool32 multiViewport_ = 0, Bool32 samplerAnisotropy_ = 0, Bool32 textureCompressionETC2_ = 0, Bool32 textureCompressionASTC_LDR_ = 0, Bool32 textureCompressionBC_ = 0, Bool32 occlusionQueryPrecise_ = 0, Bool32 pipelineStatisticsQuery_ = 0, Bool32 vertexPipelineStoresAndAtomics_ = 0, Bool32 fragmentStoresAndAtomics_ = 0, Bool32 shaderTessellationAndGeometryPointSize_ = 0, Bool32 shaderImageGatherExtended_ = 0, Bool32 shaderStorageImageExtendedFormats_ = 0, Bool32 shaderStorageImageMultisample_ = 0, Bool32 shaderStorageImageReadWithoutFormat_ = 0, Bool32 shaderStorageImageWriteWithoutFormat_ = 0, Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0, Bool32 shaderSampledImageArrayDynamicIndexing_ = 0, Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0, Bool32 shaderStorageImageArrayDynamicIndexing_ = 0, Bool32 shaderClipDistance_ = 0, Bool32 shaderCullDistance_ = 0, Bool32 shaderFloat64_ = 0, Bool32 shaderInt64_ = 0, Bool32 shaderInt16_ = 0, Bool32 shaderResourceResidency_ = 0, Bool32 shaderResourceMinLod_ = 0, Bool32 sparseBinding_ = 0, Bool32 sparseResidencyBuffer_ = 0, Bool32 sparseResidencyImage2D_ = 0, Bool32 sparseResidencyImage3D_ = 0, Bool32 sparseResidency2Samples_ = 0, Bool32 sparseResidency4Samples_ = 0, Bool32 sparseResidency8Samples_ = 0, Bool32 sparseResidency16Samples_ = 0, Bool32 sparseResidencyAliased_ = 0, Bool32 variableMultisampleRate_ = 0, Bool32 inheritedQueries_ = 0 )
3526 : robustBufferAccess( robustBufferAccess_ )
3527 , fullDrawIndexUint32( fullDrawIndexUint32_ )
3528 , imageCubeArray( imageCubeArray_ )
3529 , independentBlend( independentBlend_ )
3530 , geometryShader( geometryShader_ )
3531 , tessellationShader( tessellationShader_ )
3532 , sampleRateShading( sampleRateShading_ )
3533 , dualSrcBlend( dualSrcBlend_ )
3534 , logicOp( logicOp_ )
3535 , multiDrawIndirect( multiDrawIndirect_ )
3536 , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
3537 , depthClamp( depthClamp_ )
3538 , depthBiasClamp( depthBiasClamp_ )
3539 , fillModeNonSolid( fillModeNonSolid_ )
3540 , depthBounds( depthBounds_ )
3541 , wideLines( wideLines_ )
3542 , largePoints( largePoints_ )
3543 , alphaToOne( alphaToOne_ )
3544 , multiViewport( multiViewport_ )
3545 , samplerAnisotropy( samplerAnisotropy_ )
3546 , textureCompressionETC2( textureCompressionETC2_ )
3547 , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
3548 , textureCompressionBC( textureCompressionBC_ )
3549 , occlusionQueryPrecise( occlusionQueryPrecise_ )
3550 , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
3551 , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
3552 , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
3553 , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
3554 , shaderImageGatherExtended( shaderImageGatherExtended_ )
3555 , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
3556 , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
3557 , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
3558 , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
3559 , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
3560 , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
3561 , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
3562 , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
3563 , shaderClipDistance( shaderClipDistance_ )
3564 , shaderCullDistance( shaderCullDistance_ )
3565 , shaderFloat64( shaderFloat64_ )
3566 , shaderInt64( shaderInt64_ )
3567 , shaderInt16( shaderInt16_ )
3568 , shaderResourceResidency( shaderResourceResidency_ )
3569 , shaderResourceMinLod( shaderResourceMinLod_ )
3570 , sparseBinding( sparseBinding_ )
3571 , sparseResidencyBuffer( sparseResidencyBuffer_ )
3572 , sparseResidencyImage2D( sparseResidencyImage2D_ )
3573 , sparseResidencyImage3D( sparseResidencyImage3D_ )
3574 , sparseResidency2Samples( sparseResidency2Samples_ )
3575 , sparseResidency4Samples( sparseResidency4Samples_ )
3576 , sparseResidency8Samples( sparseResidency8Samples_ )
3577 , sparseResidency16Samples( sparseResidency16Samples_ )
3578 , sparseResidencyAliased( sparseResidencyAliased_ )
3579 , variableMultisampleRate( variableMultisampleRate_ )
3580 , inheritedQueries( inheritedQueries_ )
3581 {
3582 }
3583
3584 PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs )
3585 {
3586 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures) );
3587 }
3588
3589 PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs )
3590 {
3591 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures) );
3592 return *this;
3593 }
3594
3595 PhysicalDeviceFeatures& setRobustBufferAccess( Bool32 robustBufferAccess_ )
3596 {
3597 robustBufferAccess = robustBufferAccess_;
3598 return *this;
3599 }
3600
3601 PhysicalDeviceFeatures& setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ )
3602 {
3603 fullDrawIndexUint32 = fullDrawIndexUint32_;
3604 return *this;
3605 }
3606
3607 PhysicalDeviceFeatures& setImageCubeArray( Bool32 imageCubeArray_ )
3608 {
3609 imageCubeArray = imageCubeArray_;
3610 return *this;
3611 }
3612
3613 PhysicalDeviceFeatures& setIndependentBlend( Bool32 independentBlend_ )
3614 {
3615 independentBlend = independentBlend_;
3616 return *this;
3617 }
3618
3619 PhysicalDeviceFeatures& setGeometryShader( Bool32 geometryShader_ )
3620 {
3621 geometryShader = geometryShader_;
3622 return *this;
3623 }
3624
3625 PhysicalDeviceFeatures& setTessellationShader( Bool32 tessellationShader_ )
3626 {
3627 tessellationShader = tessellationShader_;
3628 return *this;
3629 }
3630
3631 PhysicalDeviceFeatures& setSampleRateShading( Bool32 sampleRateShading_ )
3632 {
3633 sampleRateShading = sampleRateShading_;
3634 return *this;
3635 }
3636
3637 PhysicalDeviceFeatures& setDualSrcBlend( Bool32 dualSrcBlend_ )
3638 {
3639 dualSrcBlend = dualSrcBlend_;
3640 return *this;
3641 }
3642
3643 PhysicalDeviceFeatures& setLogicOp( Bool32 logicOp_ )
3644 {
3645 logicOp = logicOp_;
3646 return *this;
3647 }
3648
3649 PhysicalDeviceFeatures& setMultiDrawIndirect( Bool32 multiDrawIndirect_ )
3650 {
3651 multiDrawIndirect = multiDrawIndirect_;
3652 return *this;
3653 }
3654
3655 PhysicalDeviceFeatures& setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ )
3656 {
3657 drawIndirectFirstInstance = drawIndirectFirstInstance_;
3658 return *this;
3659 }
3660
3661 PhysicalDeviceFeatures& setDepthClamp( Bool32 depthClamp_ )
3662 {
3663 depthClamp = depthClamp_;
3664 return *this;
3665 }
3666
3667 PhysicalDeviceFeatures& setDepthBiasClamp( Bool32 depthBiasClamp_ )
3668 {
3669 depthBiasClamp = depthBiasClamp_;
3670 return *this;
3671 }
3672
3673 PhysicalDeviceFeatures& setFillModeNonSolid( Bool32 fillModeNonSolid_ )
3674 {
3675 fillModeNonSolid = fillModeNonSolid_;
3676 return *this;
3677 }
3678
3679 PhysicalDeviceFeatures& setDepthBounds( Bool32 depthBounds_ )
3680 {
3681 depthBounds = depthBounds_;
3682 return *this;
3683 }
3684
3685 PhysicalDeviceFeatures& setWideLines( Bool32 wideLines_ )
3686 {
3687 wideLines = wideLines_;
3688 return *this;
3689 }
3690
3691 PhysicalDeviceFeatures& setLargePoints( Bool32 largePoints_ )
3692 {
3693 largePoints = largePoints_;
3694 return *this;
3695 }
3696
3697 PhysicalDeviceFeatures& setAlphaToOne( Bool32 alphaToOne_ )
3698 {
3699 alphaToOne = alphaToOne_;
3700 return *this;
3701 }
3702
3703 PhysicalDeviceFeatures& setMultiViewport( Bool32 multiViewport_ )
3704 {
3705 multiViewport = multiViewport_;
3706 return *this;
3707 }
3708
3709 PhysicalDeviceFeatures& setSamplerAnisotropy( Bool32 samplerAnisotropy_ )
3710 {
3711 samplerAnisotropy = samplerAnisotropy_;
3712 return *this;
3713 }
3714
3715 PhysicalDeviceFeatures& setTextureCompressionETC2( Bool32 textureCompressionETC2_ )
3716 {
3717 textureCompressionETC2 = textureCompressionETC2_;
3718 return *this;
3719 }
3720
3721 PhysicalDeviceFeatures& setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ )
3722 {
3723 textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
3724 return *this;
3725 }
3726
3727 PhysicalDeviceFeatures& setTextureCompressionBC( Bool32 textureCompressionBC_ )
3728 {
3729 textureCompressionBC = textureCompressionBC_;
3730 return *this;
3731 }
3732
3733 PhysicalDeviceFeatures& setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ )
3734 {
3735 occlusionQueryPrecise = occlusionQueryPrecise_;
3736 return *this;
3737 }
3738
3739 PhysicalDeviceFeatures& setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ )
3740 {
3741 pipelineStatisticsQuery = pipelineStatisticsQuery_;
3742 return *this;
3743 }
3744
3745 PhysicalDeviceFeatures& setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ )
3746 {
3747 vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
3748 return *this;
3749 }
3750
3751 PhysicalDeviceFeatures& setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ )
3752 {
3753 fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
3754 return *this;
3755 }
3756
3757 PhysicalDeviceFeatures& setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ )
3758 {
3759 shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
3760 return *this;
3761 }
3762
3763 PhysicalDeviceFeatures& setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ )
3764 {
3765 shaderImageGatherExtended = shaderImageGatherExtended_;
3766 return *this;
3767 }
3768
3769 PhysicalDeviceFeatures& setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ )
3770 {
3771 shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
3772 return *this;
3773 }
3774
3775 PhysicalDeviceFeatures& setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ )
3776 {
3777 shaderStorageImageMultisample = shaderStorageImageMultisample_;
3778 return *this;
3779 }
3780
3781 PhysicalDeviceFeatures& setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ )
3782 {
3783 shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
3784 return *this;
3785 }
3786
3787 PhysicalDeviceFeatures& setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ )
3788 {
3789 shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
3790 return *this;
3791 }
3792
3793 PhysicalDeviceFeatures& setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ )
3794 {
3795 shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
3796 return *this;
3797 }
3798
3799 PhysicalDeviceFeatures& setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ )
3800 {
3801 shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
3802 return *this;
3803 }
3804
3805 PhysicalDeviceFeatures& setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ )
3806 {
3807 shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
3808 return *this;
3809 }
3810
3811 PhysicalDeviceFeatures& setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ )
3812 {
3813 shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
3814 return *this;
3815 }
3816
3817 PhysicalDeviceFeatures& setShaderClipDistance( Bool32 shaderClipDistance_ )
3818 {
3819 shaderClipDistance = shaderClipDistance_;
3820 return *this;
3821 }
3822
3823 PhysicalDeviceFeatures& setShaderCullDistance( Bool32 shaderCullDistance_ )
3824 {
3825 shaderCullDistance = shaderCullDistance_;
3826 return *this;
3827 }
3828
3829 PhysicalDeviceFeatures& setShaderFloat64( Bool32 shaderFloat64_ )
3830 {
3831 shaderFloat64 = shaderFloat64_;
3832 return *this;
3833 }
3834
3835 PhysicalDeviceFeatures& setShaderInt64( Bool32 shaderInt64_ )
3836 {
3837 shaderInt64 = shaderInt64_;
3838 return *this;
3839 }
3840
3841 PhysicalDeviceFeatures& setShaderInt16( Bool32 shaderInt16_ )
3842 {
3843 shaderInt16 = shaderInt16_;
3844 return *this;
3845 }
3846
3847 PhysicalDeviceFeatures& setShaderResourceResidency( Bool32 shaderResourceResidency_ )
3848 {
3849 shaderResourceResidency = shaderResourceResidency_;
3850 return *this;
3851 }
3852
3853 PhysicalDeviceFeatures& setShaderResourceMinLod( Bool32 shaderResourceMinLod_ )
3854 {
3855 shaderResourceMinLod = shaderResourceMinLod_;
3856 return *this;
3857 }
3858
3859 PhysicalDeviceFeatures& setSparseBinding( Bool32 sparseBinding_ )
3860 {
3861 sparseBinding = sparseBinding_;
3862 return *this;
3863 }
3864
3865 PhysicalDeviceFeatures& setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ )
3866 {
3867 sparseResidencyBuffer = sparseResidencyBuffer_;
3868 return *this;
3869 }
3870
3871 PhysicalDeviceFeatures& setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ )
3872 {
3873 sparseResidencyImage2D = sparseResidencyImage2D_;
3874 return *this;
3875 }
3876
3877 PhysicalDeviceFeatures& setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ )
3878 {
3879 sparseResidencyImage3D = sparseResidencyImage3D_;
3880 return *this;
3881 }
3882
3883 PhysicalDeviceFeatures& setSparseResidency2Samples( Bool32 sparseResidency2Samples_ )
3884 {
3885 sparseResidency2Samples = sparseResidency2Samples_;
3886 return *this;
3887 }
3888
3889 PhysicalDeviceFeatures& setSparseResidency4Samples( Bool32 sparseResidency4Samples_ )
3890 {
3891 sparseResidency4Samples = sparseResidency4Samples_;
3892 return *this;
3893 }
3894
3895 PhysicalDeviceFeatures& setSparseResidency8Samples( Bool32 sparseResidency8Samples_ )
3896 {
3897 sparseResidency8Samples = sparseResidency8Samples_;
3898 return *this;
3899 }
3900
3901 PhysicalDeviceFeatures& setSparseResidency16Samples( Bool32 sparseResidency16Samples_ )
3902 {
3903 sparseResidency16Samples = sparseResidency16Samples_;
3904 return *this;
3905 }
3906
3907 PhysicalDeviceFeatures& setSparseResidencyAliased( Bool32 sparseResidencyAliased_ )
3908 {
3909 sparseResidencyAliased = sparseResidencyAliased_;
3910 return *this;
3911 }
3912
3913 PhysicalDeviceFeatures& setVariableMultisampleRate( Bool32 variableMultisampleRate_ )
3914 {
3915 variableMultisampleRate = variableMultisampleRate_;
3916 return *this;
3917 }
3918
3919 PhysicalDeviceFeatures& setInheritedQueries( Bool32 inheritedQueries_ )
3920 {
3921 inheritedQueries = inheritedQueries_;
3922 return *this;
3923 }
3924
3925 operator const VkPhysicalDeviceFeatures&() const
3926 {
3927 return *reinterpret_cast<const VkPhysicalDeviceFeatures*>(this);
3928 }
3929
3930 bool operator==( PhysicalDeviceFeatures const& rhs ) const
3931 {
3932 return ( robustBufferAccess == rhs.robustBufferAccess )
3933 && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
3934 && ( imageCubeArray == rhs.imageCubeArray )
3935 && ( independentBlend == rhs.independentBlend )
3936 && ( geometryShader == rhs.geometryShader )
3937 && ( tessellationShader == rhs.tessellationShader )
3938 && ( sampleRateShading == rhs.sampleRateShading )
3939 && ( dualSrcBlend == rhs.dualSrcBlend )
3940 && ( logicOp == rhs.logicOp )
3941 && ( multiDrawIndirect == rhs.multiDrawIndirect )
3942 && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
3943 && ( depthClamp == rhs.depthClamp )
3944 && ( depthBiasClamp == rhs.depthBiasClamp )
3945 && ( fillModeNonSolid == rhs.fillModeNonSolid )
3946 && ( depthBounds == rhs.depthBounds )
3947 && ( wideLines == rhs.wideLines )
3948 && ( largePoints == rhs.largePoints )
3949 && ( alphaToOne == rhs.alphaToOne )
3950 && ( multiViewport == rhs.multiViewport )
3951 && ( samplerAnisotropy == rhs.samplerAnisotropy )
3952 && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
3953 && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
3954 && ( textureCompressionBC == rhs.textureCompressionBC )
3955 && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
3956 && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
3957 && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
3958 && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
3959 && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
3960 && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
3961 && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
3962 && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
3963 && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
3964 && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
3965 && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
3966 && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
3967 && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
3968 && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
3969 && ( shaderClipDistance == rhs.shaderClipDistance )
3970 && ( shaderCullDistance == rhs.shaderCullDistance )
3971 && ( shaderFloat64 == rhs.shaderFloat64 )
3972 && ( shaderInt64 == rhs.shaderInt64 )
3973 && ( shaderInt16 == rhs.shaderInt16 )
3974 && ( shaderResourceResidency == rhs.shaderResourceResidency )
3975 && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
3976 && ( sparseBinding == rhs.sparseBinding )
3977 && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
3978 && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
3979 && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
3980 && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
3981 && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
3982 && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
3983 && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
3984 && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
3985 && ( variableMultisampleRate == rhs.variableMultisampleRate )
3986 && ( inheritedQueries == rhs.inheritedQueries );
3987 }
3988
3989 bool operator!=( PhysicalDeviceFeatures const& rhs ) const
3990 {
3991 return !operator==( rhs );
3992 }
3993
3994 Bool32 robustBufferAccess;
3995 Bool32 fullDrawIndexUint32;
3996 Bool32 imageCubeArray;
3997 Bool32 independentBlend;
3998 Bool32 geometryShader;
3999 Bool32 tessellationShader;
4000 Bool32 sampleRateShading;
4001 Bool32 dualSrcBlend;
4002 Bool32 logicOp;
4003 Bool32 multiDrawIndirect;
4004 Bool32 drawIndirectFirstInstance;
4005 Bool32 depthClamp;
4006 Bool32 depthBiasClamp;
4007 Bool32 fillModeNonSolid;
4008 Bool32 depthBounds;
4009 Bool32 wideLines;
4010 Bool32 largePoints;
4011 Bool32 alphaToOne;
4012 Bool32 multiViewport;
4013 Bool32 samplerAnisotropy;
4014 Bool32 textureCompressionETC2;
4015 Bool32 textureCompressionASTC_LDR;
4016 Bool32 textureCompressionBC;
4017 Bool32 occlusionQueryPrecise;
4018 Bool32 pipelineStatisticsQuery;
4019 Bool32 vertexPipelineStoresAndAtomics;
4020 Bool32 fragmentStoresAndAtomics;
4021 Bool32 shaderTessellationAndGeometryPointSize;
4022 Bool32 shaderImageGatherExtended;
4023 Bool32 shaderStorageImageExtendedFormats;
4024 Bool32 shaderStorageImageMultisample;
4025 Bool32 shaderStorageImageReadWithoutFormat;
4026 Bool32 shaderStorageImageWriteWithoutFormat;
4027 Bool32 shaderUniformBufferArrayDynamicIndexing;
4028 Bool32 shaderSampledImageArrayDynamicIndexing;
4029 Bool32 shaderStorageBufferArrayDynamicIndexing;
4030 Bool32 shaderStorageImageArrayDynamicIndexing;
4031 Bool32 shaderClipDistance;
4032 Bool32 shaderCullDistance;
4033 Bool32 shaderFloat64;
4034 Bool32 shaderInt64;
4035 Bool32 shaderInt16;
4036 Bool32 shaderResourceResidency;
4037 Bool32 shaderResourceMinLod;
4038 Bool32 sparseBinding;
4039 Bool32 sparseResidencyBuffer;
4040 Bool32 sparseResidencyImage2D;
4041 Bool32 sparseResidencyImage3D;
4042 Bool32 sparseResidency2Samples;
4043 Bool32 sparseResidency4Samples;
4044 Bool32 sparseResidency8Samples;
4045 Bool32 sparseResidency16Samples;
4046 Bool32 sparseResidencyAliased;
4047 Bool32 variableMultisampleRate;
4048 Bool32 inheritedQueries;
4049 };
4050 static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
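  // Illustrative sketch (editorial note, not generated code): the setters above return
  // *this, so a feature request can be built by chaining, and the conversion operator
  // exposes the wrapper to the C API without a copy. Assumes the enclosing vk namespace.
  //
  //   vk::PhysicalDeviceFeatures enabledFeatures;
  //   enabledFeatures.setShaderInt64( VK_TRUE )
  //                  .setSparseBinding( VK_TRUE )
  //                  .setInheritedQueries( VK_TRUE );
  //   const VkPhysicalDeviceFeatures & cFeatures = enabledFeatures;  // reinterpreted view, no copy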
4051
4052 struct PhysicalDeviceSparseProperties
4053 {
4054 operator const VkPhysicalDeviceSparseProperties&() const
4055 {
4056 return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>(this);
4057 }
4058
4059 bool operator==( PhysicalDeviceSparseProperties const& rhs ) const
4060 {
4061 return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
4062 && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
4063 && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
4064 && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
4065 && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
4066 }
4067
4068 bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const
4069 {
4070 return !operator==( rhs );
4071 }
4072
4073 Bool32 residencyStandard2DBlockShape;
4074 Bool32 residencyStandard2DMultisampleBlockShape;
4075 Bool32 residencyStandard3DBlockShape;
4076 Bool32 residencyAlignedMipSize;
4077 Bool32 residencyNonResidentStrict;
4078 };
4079 static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
4080
4081 struct DrawIndirectCommand
4082 {
4083 DrawIndirectCommand( uint32_t vertexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstVertex_ = 0, uint32_t firstInstance_ = 0 )
4084 : vertexCount( vertexCount_ )
4085 , instanceCount( instanceCount_ )
4086 , firstVertex( firstVertex_ )
4087 , firstInstance( firstInstance_ )
4088 {
4089 }
4090
4091 DrawIndirectCommand( VkDrawIndirectCommand const & rhs )
4092 {
4093 memcpy( this, &rhs, sizeof(DrawIndirectCommand) );
4094 }
4095
4096 DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs )
4097 {
4098 memcpy( this, &rhs, sizeof(DrawIndirectCommand) );
4099 return *this;
4100 }
4101
4102 DrawIndirectCommand& setVertexCount( uint32_t vertexCount_ )
4103 {
4104 vertexCount = vertexCount_;
4105 return *this;
4106 }
4107
4108 DrawIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
4109 {
4110 instanceCount = instanceCount_;
4111 return *this;
4112 }
4113
4114 DrawIndirectCommand& setFirstVertex( uint32_t firstVertex_ )
4115 {
4116 firstVertex = firstVertex_;
4117 return *this;
4118 }
4119
4120 DrawIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
4121 {
4122 firstInstance = firstInstance_;
4123 return *this;
4124 }
4125
4126 operator const VkDrawIndirectCommand&() const
4127 {
4128 return *reinterpret_cast<const VkDrawIndirectCommand*>(this);
4129 }
4130
4131 bool operator==( DrawIndirectCommand const& rhs ) const
4132 {
4133 return ( vertexCount == rhs.vertexCount )
4134 && ( instanceCount == rhs.instanceCount )
4135 && ( firstVertex == rhs.firstVertex )
4136 && ( firstInstance == rhs.firstInstance );
4137 }
4138
4139 bool operator!=( DrawIndirectCommand const& rhs ) const
4140 {
4141 return !operator==( rhs );
4142 }
4143
4144 uint32_t vertexCount;
4145 uint32_t instanceCount;
4146 uint32_t firstVertex;
4147 uint32_t firstInstance;
4148 };
4149 static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
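  // Illustrative sketch (editorial note): a DrawIndirectCommand record as it would be
  // written into an indirect buffer consumed by vkCmdDrawIndirect; the buffer upload
  // itself is assumed to happen elsewhere.
  //
  //   vk::DrawIndirectCommand draw( 3 /*vertexCount*/, 1 /*instanceCount*/, 0 /*firstVertex*/, 0 /*firstInstance*/ );
  //   draw.setInstanceCount( 16 );                   // adjust a field after construction
  //   const VkDrawIndirectCommand & cDraw = draw;    // byte-compatible with the C struct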
4150
4151 struct DrawIndexedIndirectCommand
4152 {
4153 DrawIndexedIndirectCommand( uint32_t indexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstIndex_ = 0, int32_t vertexOffset_ = 0, uint32_t firstInstance_ = 0 )
4154 : indexCount( indexCount_ )
4155 , instanceCount( instanceCount_ )
4156 , firstIndex( firstIndex_ )
4157 , vertexOffset( vertexOffset_ )
4158 , firstInstance( firstInstance_ )
4159 {
4160 }
4161
4162 DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs )
4163 {
4164 memcpy( this, &rhs, sizeof(DrawIndexedIndirectCommand) );
4165 }
4166
4167 DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs )
4168 {
4169 memcpy( this, &rhs, sizeof(DrawIndexedIndirectCommand) );
4170 return *this;
4171 }
4172
4173 DrawIndexedIndirectCommand& setIndexCount( uint32_t indexCount_ )
4174 {
4175 indexCount = indexCount_;
4176 return *this;
4177 }
4178
4179 DrawIndexedIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
4180 {
4181 instanceCount = instanceCount_;
4182 return *this;
4183 }
4184
4185 DrawIndexedIndirectCommand& setFirstIndex( uint32_t firstIndex_ )
4186 {
4187 firstIndex = firstIndex_;
4188 return *this;
4189 }
4190
4191 DrawIndexedIndirectCommand& setVertexOffset( int32_t vertexOffset_ )
4192 {
4193 vertexOffset = vertexOffset_;
4194 return *this;
4195 }
4196
4197 DrawIndexedIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
4198 {
4199 firstInstance = firstInstance_;
4200 return *this;
4201 }
4202
4203 operator const VkDrawIndexedIndirectCommand&() const
4204 {
4205 return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>(this);
4206 }
4207
4208 bool operator==( DrawIndexedIndirectCommand const& rhs ) const
4209 {
4210 return ( indexCount == rhs.indexCount )
4211 && ( instanceCount == rhs.instanceCount )
4212 && ( firstIndex == rhs.firstIndex )
4213 && ( vertexOffset == rhs.vertexOffset )
4214 && ( firstInstance == rhs.firstInstance );
4215 }
4216
4217 bool operator!=( DrawIndexedIndirectCommand const& rhs ) const
4218 {
4219 return !operator==( rhs );
4220 }
4221
4222 uint32_t indexCount;
4223 uint32_t instanceCount;
4224 uint32_t firstIndex;
4225 int32_t vertexOffset;
4226 uint32_t firstInstance;
4227 };
4228 static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
4229
4230 struct DispatchIndirectCommand
4231 {
4232 DispatchIndirectCommand( uint32_t x_ = 0, uint32_t y_ = 0, uint32_t z_ = 0 )
4233 : x( x_ )
4234 , y( y_ )
4235 , z( z_ )
4236 {
4237 }
4238
4239 DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs )
4240 {
4241 memcpy( this, &rhs, sizeof(DispatchIndirectCommand) );
4242 }
4243
4244 DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs )
4245 {
4246 memcpy( this, &rhs, sizeof(DispatchIndirectCommand) );
4247 return *this;
4248 }
4249
4250 DispatchIndirectCommand& setX( uint32_t x_ )
4251 {
4252 x = x_;
4253 return *this;
4254 }
4255
4256 DispatchIndirectCommand& setY( uint32_t y_ )
4257 {
4258 y = y_;
4259 return *this;
4260 }
4261
4262 DispatchIndirectCommand& setZ( uint32_t z_ )
4263 {
4264 z = z_;
4265 return *this;
4266 }
4267
4268 operator const VkDispatchIndirectCommand&() const
4269 {
4270 return *reinterpret_cast<const VkDispatchIndirectCommand*>(this);
4271 }
4272
4273 bool operator==( DispatchIndirectCommand const& rhs ) const
4274 {
4275 return ( x == rhs.x )
4276 && ( y == rhs.y )
4277 && ( z == rhs.z );
4278 }
4279
4280 bool operator!=( DispatchIndirectCommand const& rhs ) const
4281 {
4282 return !operator==( rhs );
4283 }
4284
4285 uint32_t x;
4286 uint32_t y;
4287 uint32_t z;
4288 };
4289 static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
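  // Illustrative sketch (editorial note): a dispatch record for vkCmdDispatchIndirect,
  // here sized for an assumed 256x256 image processed with 16x16 workgroups.
  //
  //   vk::DispatchIndirectCommand dispatch( 256 / 16, 256 / 16, 1 );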
4290
4291 struct DisplayPlanePropertiesKHR
4292 {
4293    operator const VkDisplayPlanePropertiesKHR&() const
4294 {
4295 return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>(this);
4296 }
4297
4298 bool operator==( DisplayPlanePropertiesKHR const& rhs ) const
4299 {
4300 return ( currentDisplay == rhs.currentDisplay )
4301 && ( currentStackIndex == rhs.currentStackIndex );
4302 }
4303
4304 bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const
4305 {
4306 return !operator==( rhs );
4307 }
4308
4309 DisplayKHR currentDisplay;
4310 uint32_t currentStackIndex;
4311 };
4312 static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
4313
4314 struct DisplayModeParametersKHR
4315 {
4316 DisplayModeParametersKHR( Extent2D visibleRegion_ = Extent2D(), uint32_t refreshRate_ = 0 )
4317 : visibleRegion( visibleRegion_ )
4318 , refreshRate( refreshRate_ )
4319 {
4320 }
4321
4322 DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs )
4323 {
4324 memcpy( this, &rhs, sizeof(DisplayModeParametersKHR) );
4325 }
4326
4327 DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs )
4328 {
4329 memcpy( this, &rhs, sizeof(DisplayModeParametersKHR) );
4330 return *this;
4331 }
4332
4333 DisplayModeParametersKHR& setVisibleRegion( Extent2D visibleRegion_ )
4334 {
4335 visibleRegion = visibleRegion_;
4336 return *this;
4337 }
4338
4339 DisplayModeParametersKHR& setRefreshRate( uint32_t refreshRate_ )
4340 {
4341 refreshRate = refreshRate_;
4342 return *this;
4343 }
4344
4345 operator const VkDisplayModeParametersKHR&() const
4346 {
4347 return *reinterpret_cast<const VkDisplayModeParametersKHR*>(this);
4348 }
4349
4350 bool operator==( DisplayModeParametersKHR const& rhs ) const
4351 {
4352 return ( visibleRegion == rhs.visibleRegion )
4353 && ( refreshRate == rhs.refreshRate );
4354 }
4355
4356 bool operator!=( DisplayModeParametersKHR const& rhs ) const
4357 {
4358 return !operator==( rhs );
4359 }
4360
4361 Extent2D visibleRegion;
4362 uint32_t refreshRate;
4363 };
4364 static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
4365
4366 struct DisplayModePropertiesKHR
4367 {
4368    operator const VkDisplayModePropertiesKHR&() const
4369 {
4370 return *reinterpret_cast<const VkDisplayModePropertiesKHR*>(this);
4371 }
4372
4373 bool operator==( DisplayModePropertiesKHR const& rhs ) const
4374 {
4375 return ( displayMode == rhs.displayMode )
4376 && ( parameters == rhs.parameters );
4377 }
4378
4379 bool operator!=( DisplayModePropertiesKHR const& rhs ) const
4380 {
4381 return !operator==( rhs );
4382 }
4383
4384 DisplayModeKHR displayMode;
4385 DisplayModeParametersKHR parameters;
4386 };
4387 static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
4388
4389 enum class ImageLayout
4390 {
4391 eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
4392 eGeneral = VK_IMAGE_LAYOUT_GENERAL,
4393 eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
4394 eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
4395 eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
4396 eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
4397 eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4398 eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4399 ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
4400 ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
4401 };
4402
4403 struct DescriptorImageInfo
4404 {
4405 DescriptorImageInfo( Sampler sampler_ = Sampler(), ImageView imageView_ = ImageView(), ImageLayout imageLayout_ = ImageLayout::eUndefined )
4406 : sampler( sampler_ )
4407 , imageView( imageView_ )
4408 , imageLayout( imageLayout_ )
4409 {
4410 }
4411
4412 DescriptorImageInfo( VkDescriptorImageInfo const & rhs )
4413 {
4414 memcpy( this, &rhs, sizeof(DescriptorImageInfo) );
4415 }
4416
4417 DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs )
4418 {
4419 memcpy( this, &rhs, sizeof(DescriptorImageInfo) );
4420 return *this;
4421 }
4422
4423 DescriptorImageInfo& setSampler( Sampler sampler_ )
4424 {
4425 sampler = sampler_;
4426 return *this;
4427 }
4428
4429 DescriptorImageInfo& setImageView( ImageView imageView_ )
4430 {
4431 imageView = imageView_;
4432 return *this;
4433 }
4434
4435 DescriptorImageInfo& setImageLayout( ImageLayout imageLayout_ )
4436 {
4437 imageLayout = imageLayout_;
4438 return *this;
4439 }
4440
4441 operator const VkDescriptorImageInfo&() const
4442 {
4443 return *reinterpret_cast<const VkDescriptorImageInfo*>(this);
4444 }
4445
4446 bool operator==( DescriptorImageInfo const& rhs ) const
4447 {
4448 return ( sampler == rhs.sampler )
4449 && ( imageView == rhs.imageView )
4450 && ( imageLayout == rhs.imageLayout );
4451 }
4452
4453 bool operator!=( DescriptorImageInfo const& rhs ) const
4454 {
4455 return !operator==( rhs );
4456 }
4457
4458 Sampler sampler;
4459 ImageView imageView;
4460 ImageLayout imageLayout;
4461 };
4462 static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
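  // Illustrative sketch (editorial note): filling a DescriptorImageInfo for a combined
  // image sampler; `sampler` and `imageView` are assumed, previously created handles.
  //
  //   vk::DescriptorImageInfo imageInfo( sampler, imageView, vk::ImageLayout::eShaderReadOnlyOptimal );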
4463
4464 struct AttachmentReference
4465 {
4466 AttachmentReference( uint32_t attachment_ = 0, ImageLayout layout_ = ImageLayout::eUndefined )
4467 : attachment( attachment_ )
4468 , layout( layout_ )
4469 {
4470 }
4471
4472 AttachmentReference( VkAttachmentReference const & rhs )
4473 {
4474 memcpy( this, &rhs, sizeof(AttachmentReference) );
4475 }
4476
4477 AttachmentReference& operator=( VkAttachmentReference const & rhs )
4478 {
4479 memcpy( this, &rhs, sizeof(AttachmentReference) );
4480 return *this;
4481 }
4482
4483 AttachmentReference& setAttachment( uint32_t attachment_ )
4484 {
4485 attachment = attachment_;
4486 return *this;
4487 }
4488
4489 AttachmentReference& setLayout( ImageLayout layout_ )
4490 {
4491 layout = layout_;
4492 return *this;
4493 }
4494
4495 operator const VkAttachmentReference&() const
4496 {
4497 return *reinterpret_cast<const VkAttachmentReference*>(this);
4498 }
4499
4500 bool operator==( AttachmentReference const& rhs ) const
4501 {
4502 return ( attachment == rhs.attachment )
4503 && ( layout == rhs.layout );
4504 }
4505
4506 bool operator!=( AttachmentReference const& rhs ) const
4507 {
4508 return !operator==( rhs );
4509 }
4510
4511 uint32_t attachment;
4512 ImageLayout layout;
4513 };
4514 static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
4515
4516 enum class AttachmentLoadOp
4517 {
4518 eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
4519 eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
4520 eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE
4521 };
4522
4523 enum class AttachmentStoreOp
4524 {
4525 eStore = VK_ATTACHMENT_STORE_OP_STORE,
4526 eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE
4527 };
4528
4529 enum class ImageType
4530 {
4531 e1D = VK_IMAGE_TYPE_1D,
4532 e2D = VK_IMAGE_TYPE_2D,
4533 e3D = VK_IMAGE_TYPE_3D
4534 };
4535
4536 enum class ImageTiling
4537 {
4538 eOptimal = VK_IMAGE_TILING_OPTIMAL,
4539 eLinear = VK_IMAGE_TILING_LINEAR
4540 };
4541
4542 enum class ImageViewType
4543 {
4544 e1D = VK_IMAGE_VIEW_TYPE_1D,
4545 e2D = VK_IMAGE_VIEW_TYPE_2D,
4546 e3D = VK_IMAGE_VIEW_TYPE_3D,
4547 eCube = VK_IMAGE_VIEW_TYPE_CUBE,
4548 e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
4549 e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
4550 eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
4551 };
4552
4553 enum class CommandBufferLevel
4554 {
4555 ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
4556 eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
4557 };
4558
4559 enum class ComponentSwizzle
4560 {
4561 eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
4562 eZero = VK_COMPONENT_SWIZZLE_ZERO,
4563 eOne = VK_COMPONENT_SWIZZLE_ONE,
4564 eR = VK_COMPONENT_SWIZZLE_R,
4565 eG = VK_COMPONENT_SWIZZLE_G,
4566 eB = VK_COMPONENT_SWIZZLE_B,
4567 eA = VK_COMPONENT_SWIZZLE_A
4568 };
4569
4570 struct ComponentMapping
4571 {
4572 ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity, ComponentSwizzle g_ = ComponentSwizzle::eIdentity, ComponentSwizzle b_ = ComponentSwizzle::eIdentity, ComponentSwizzle a_ = ComponentSwizzle::eIdentity )
4573 : r( r_ )
4574 , g( g_ )
4575 , b( b_ )
4576 , a( a_ )
4577 {
4578 }
4579
4580 ComponentMapping( VkComponentMapping const & rhs )
4581 {
4582 memcpy( this, &rhs, sizeof(ComponentMapping) );
4583 }
4584
4585 ComponentMapping& operator=( VkComponentMapping const & rhs )
4586 {
4587 memcpy( this, &rhs, sizeof(ComponentMapping) );
4588 return *this;
4589 }
4590
4591 ComponentMapping& setR( ComponentSwizzle r_ )
4592 {
4593 r = r_;
4594 return *this;
4595 }
4596
4597 ComponentMapping& setG( ComponentSwizzle g_ )
4598 {
4599 g = g_;
4600 return *this;
4601 }
4602
4603 ComponentMapping& setB( ComponentSwizzle b_ )
4604 {
4605 b = b_;
4606 return *this;
4607 }
4608
4609 ComponentMapping& setA( ComponentSwizzle a_ )
4610 {
4611 a = a_;
4612 return *this;
4613 }
4614
4615 operator const VkComponentMapping&() const
4616 {
4617 return *reinterpret_cast<const VkComponentMapping*>(this);
4618 }
4619
4620 bool operator==( ComponentMapping const& rhs ) const
4621 {
4622 return ( r == rhs.r )
4623 && ( g == rhs.g )
4624 && ( b == rhs.b )
4625 && ( a == rhs.a );
4626 }
4627
4628 bool operator!=( ComponentMapping const& rhs ) const
4629 {
4630 return !operator==( rhs );
4631 }
4632
4633 ComponentSwizzle r;
4634 ComponentSwizzle g;
4635 ComponentSwizzle b;
4636 ComponentSwizzle a;
4637 };
4638 static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
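  // Illustrative sketch (editorial note): the default-constructed mapping is the identity
  // swizzle; an explicit mapping can reorder channels, e.g. viewing BGRA data as RGBA.
  //
  //   vk::ComponentMapping bgraAsRgba( vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eG,
  //                                    vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eA );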
4639
4640 enum class DescriptorType
4641 {
4642 eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
4643 eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
4644 eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
4645 eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
4646 eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
4647 eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
4648 eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
4649 eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
4650 eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
4651 eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
4652 eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
4653 };
4654
4655 struct DescriptorPoolSize
4656 {
4657 DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0 )
4658 : type( type_ )
4659 , descriptorCount( descriptorCount_ )
4660 {
4661 }
4662
4663 DescriptorPoolSize( VkDescriptorPoolSize const & rhs )
4664 {
4665 memcpy( this, &rhs, sizeof(DescriptorPoolSize) );
4666 }
4667
4668 DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs )
4669 {
4670 memcpy( this, &rhs, sizeof(DescriptorPoolSize) );
4671 return *this;
4672 }
4673
4674 DescriptorPoolSize& setType( DescriptorType type_ )
4675 {
4676 type = type_;
4677 return *this;
4678 }
4679
4680 DescriptorPoolSize& setDescriptorCount( uint32_t descriptorCount_ )
4681 {
4682 descriptorCount = descriptorCount_;
4683 return *this;
4684 }
4685
4686 operator const VkDescriptorPoolSize&() const
4687 {
4688 return *reinterpret_cast<const VkDescriptorPoolSize*>(this);
4689 }
4690
4691 bool operator==( DescriptorPoolSize const& rhs ) const
4692 {
4693 return ( type == rhs.type )
4694 && ( descriptorCount == rhs.descriptorCount );
4695 }
4696
4697 bool operator!=( DescriptorPoolSize const& rhs ) const
4698 {
4699 return !operator==( rhs );
4700 }
4701
4702 DescriptorType type;
4703 uint32_t descriptorCount;
4704 };
4705 static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
4706
4707 enum class QueryType
4708 {
4709 eOcclusion = VK_QUERY_TYPE_OCCLUSION,
4710 ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
4711 eTimestamp = VK_QUERY_TYPE_TIMESTAMP
4712 };
4713
4714 enum class BorderColor
4715 {
4716 eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
4717 eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
4718 eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
4719 eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
4720 eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
4721 eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
4722 };
4723
4724 enum class PipelineBindPoint
4725 {
4726 eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
4727 eCompute = VK_PIPELINE_BIND_POINT_COMPUTE
4728 };
4729
4730 struct SubpassDescription
4731 {
4732 SubpassDescription( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = 0, const AttachmentReference* pInputAttachments_ = nullptr, uint32_t colorAttachmentCount_ = 0, const AttachmentReference* pColorAttachments_ = nullptr, const AttachmentReference* pResolveAttachments_ = nullptr, const AttachmentReference* pDepthStencilAttachment_ = nullptr, uint32_t preserveAttachmentCount_ = 0, const uint32_t* pPreserveAttachments_ = nullptr )
4733 : flags( flags_ )
4734 , pipelineBindPoint( pipelineBindPoint_ )
4735 , inputAttachmentCount( inputAttachmentCount_ )
4736 , pInputAttachments( pInputAttachments_ )
4737 , colorAttachmentCount( colorAttachmentCount_ )
4738 , pColorAttachments( pColorAttachments_ )
4739 , pResolveAttachments( pResolveAttachments_ )
4740 , pDepthStencilAttachment( pDepthStencilAttachment_ )
4741 , preserveAttachmentCount( preserveAttachmentCount_ )
4742 , pPreserveAttachments( pPreserveAttachments_ )
4743 {
4744 }
4745
4746 SubpassDescription( VkSubpassDescription const & rhs )
4747 {
4748 memcpy( this, &rhs, sizeof(SubpassDescription) );
4749 }
4750
4751 SubpassDescription& operator=( VkSubpassDescription const & rhs )
4752 {
4753 memcpy( this, &rhs, sizeof(SubpassDescription) );
4754 return *this;
4755 }
4756
4757 SubpassDescription& setFlags( SubpassDescriptionFlags flags_ )
4758 {
4759 flags = flags_;
4760 return *this;
4761 }
4762
4763 SubpassDescription& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
4764 {
4765 pipelineBindPoint = pipelineBindPoint_;
4766 return *this;
4767 }
4768
4769 SubpassDescription& setInputAttachmentCount( uint32_t inputAttachmentCount_ )
4770 {
4771 inputAttachmentCount = inputAttachmentCount_;
4772 return *this;
4773 }
4774
4775 SubpassDescription& setPInputAttachments( const AttachmentReference* pInputAttachments_ )
4776 {
4777 pInputAttachments = pInputAttachments_;
4778 return *this;
4779 }
4780
4781 SubpassDescription& setColorAttachmentCount( uint32_t colorAttachmentCount_ )
4782 {
4783 colorAttachmentCount = colorAttachmentCount_;
4784 return *this;
4785 }
4786
4787 SubpassDescription& setPColorAttachments( const AttachmentReference* pColorAttachments_ )
4788 {
4789 pColorAttachments = pColorAttachments_;
4790 return *this;
4791 }
4792
4793 SubpassDescription& setPResolveAttachments( const AttachmentReference* pResolveAttachments_ )
4794 {
4795 pResolveAttachments = pResolveAttachments_;
4796 return *this;
4797 }
4798
4799 SubpassDescription& setPDepthStencilAttachment( const AttachmentReference* pDepthStencilAttachment_ )
4800 {
4801 pDepthStencilAttachment = pDepthStencilAttachment_;
4802 return *this;
4803 }
4804
4805 SubpassDescription& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ )
4806 {
4807 preserveAttachmentCount = preserveAttachmentCount_;
4808 return *this;
4809 }
4810
4811 SubpassDescription& setPPreserveAttachments( const uint32_t* pPreserveAttachments_ )
4812 {
4813 pPreserveAttachments = pPreserveAttachments_;
4814 return *this;
4815 }
4816
4817 operator const VkSubpassDescription&() const
4818 {
4819 return *reinterpret_cast<const VkSubpassDescription*>(this);
4820 }
4821
4822 bool operator==( SubpassDescription const& rhs ) const
4823 {
4824 return ( flags == rhs.flags )
4825 && ( pipelineBindPoint == rhs.pipelineBindPoint )
4826 && ( inputAttachmentCount == rhs.inputAttachmentCount )
4827 && ( pInputAttachments == rhs.pInputAttachments )
4828 && ( colorAttachmentCount == rhs.colorAttachmentCount )
4829 && ( pColorAttachments == rhs.pColorAttachments )
4830 && ( pResolveAttachments == rhs.pResolveAttachments )
4831 && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
4832 && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
4833 && ( pPreserveAttachments == rhs.pPreserveAttachments );
4834 }
4835
4836 bool operator!=( SubpassDescription const& rhs ) const
4837 {
4838 return !operator==( rhs );
4839 }
4840
4841 SubpassDescriptionFlags flags;
4842 PipelineBindPoint pipelineBindPoint;
4843 uint32_t inputAttachmentCount;
4844 const AttachmentReference* pInputAttachments;
4845 uint32_t colorAttachmentCount;
4846 const AttachmentReference* pColorAttachments;
4847 const AttachmentReference* pResolveAttachments;
4848 const AttachmentReference* pDepthStencilAttachment;
4849 uint32_t preserveAttachmentCount;
4850 const uint32_t* pPreserveAttachments;
4851 };
4852 static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
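  // Illustrative sketch (editorial note): a single-color-attachment graphics subpass built
  // with the chained setters above; the AttachmentReference must stay alive until the
  // render pass creation call that reads this description.
  //
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription subpass;
  //   subpass.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //          .setColorAttachmentCount( 1 )
  //          .setPColorAttachments( &colorRef );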
4853
4854 enum class PipelineCacheHeaderVersion
4855 {
4856 eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
4857 };
4858
4859 enum class PrimitiveTopology
4860 {
4861 ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
4862 eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
4863 eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
4864 eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
4865 eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
4866 eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
4867 eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
4868 eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
4869 eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
4870 eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
4871 ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
4872 };
4873
4874 enum class SharingMode
4875 {
4876 eExclusive = VK_SHARING_MODE_EXCLUSIVE,
4877 eConcurrent = VK_SHARING_MODE_CONCURRENT
4878 };
4879
4880 enum class IndexType
4881 {
4882 eUint16 = VK_INDEX_TYPE_UINT16,
4883 eUint32 = VK_INDEX_TYPE_UINT32
4884 };
4885
4886 enum class Filter
4887 {
4888 eNearest = VK_FILTER_NEAREST,
4889 eLinear = VK_FILTER_LINEAR,
4890 eCubicIMG = VK_FILTER_CUBIC_IMG
4891 };
4892
4893 enum class SamplerMipmapMode
4894 {
4895 eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
4896 eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
4897 };
4898
4899 enum class SamplerAddressMode
4900 {
4901 eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
4902 eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
4903 eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
4904 eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
4905 eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
4906 };
4907
4908 enum class CompareOp
4909 {
4910 eNever = VK_COMPARE_OP_NEVER,
4911 eLess = VK_COMPARE_OP_LESS,
4912 eEqual = VK_COMPARE_OP_EQUAL,
4913 eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
4914 eGreater = VK_COMPARE_OP_GREATER,
4915 eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
4916 eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
4917 eAlways = VK_COMPARE_OP_ALWAYS
4918 };
4919
4920 enum class PolygonMode
4921 {
4922 eFill = VK_POLYGON_MODE_FILL,
4923 eLine = VK_POLYGON_MODE_LINE,
4924 ePoint = VK_POLYGON_MODE_POINT
4925 };
4926
4927 enum class CullModeFlagBits
4928 {
4929 eNone = VK_CULL_MODE_NONE,
4930 eFront = VK_CULL_MODE_FRONT_BIT,
4931 eBack = VK_CULL_MODE_BACK_BIT,
4932 eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
4933 };
4934
4935 using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
4936
4937  VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
4938  {
4939 return CullModeFlags( bit0 ) | bit1;
4940 }
4941
4942  VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
4943 {
4944 return ~( CullModeFlags( bits ) );
4945 }
4946
4947 template <> struct FlagTraits<CullModeFlagBits>
4948 {
4949 enum
4950 {
4951 allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
4952 };
4953 };
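  // Illustrative sketch (editorial note): individual CullModeFlagBits combine into a
  // CullModeFlags mask through the operator| defined above.
  //
  //   vk::CullModeFlags cull = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;  // same mask as eFrontAndBack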
4954
4955  enum class FrontFace
4956 {
4957 eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
4958 eClockwise = VK_FRONT_FACE_CLOCKWISE
4959 };
4960
4961 enum class BlendFactor
4962 {
4963 eZero = VK_BLEND_FACTOR_ZERO,
4964 eOne = VK_BLEND_FACTOR_ONE,
4965 eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
4966 eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
4967 eDstColor = VK_BLEND_FACTOR_DST_COLOR,
4968 eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
4969 eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
4970 eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
4971 eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
4972 eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
4973 eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
4974 eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
4975 eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
4976 eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
4977 eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
4978 eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
4979 eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
4980 eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
4981 eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
4982 };
4983
4984 enum class BlendOp
4985 {
4986 eAdd = VK_BLEND_OP_ADD,
4987 eSubtract = VK_BLEND_OP_SUBTRACT,
4988 eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
4989 eMin = VK_BLEND_OP_MIN,
4990 eMax = VK_BLEND_OP_MAX
4991 };
4992
4993 enum class StencilOp
4994 {
4995 eKeep = VK_STENCIL_OP_KEEP,
4996 eZero = VK_STENCIL_OP_ZERO,
4997 eReplace = VK_STENCIL_OP_REPLACE,
4998 eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
4999 eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
5000 eInvert = VK_STENCIL_OP_INVERT,
5001 eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
5002 eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
5003 };
5004
5005 struct StencilOpState
5006 {
5007 StencilOpState( StencilOp failOp_ = StencilOp::eKeep, StencilOp passOp_ = StencilOp::eKeep, StencilOp depthFailOp_ = StencilOp::eKeep, CompareOp compareOp_ = CompareOp::eNever, uint32_t compareMask_ = 0, uint32_t writeMask_ = 0, uint32_t reference_ = 0 )
5008 : failOp( failOp_ )
5009 , passOp( passOp_ )
5010 , depthFailOp( depthFailOp_ )
5011 , compareOp( compareOp_ )
5012 , compareMask( compareMask_ )
5013 , writeMask( writeMask_ )
5014 , reference( reference_ )
5015 {
5016 }
5017
5018 StencilOpState( VkStencilOpState const & rhs )
5019 {
5020 memcpy( this, &rhs, sizeof(StencilOpState) );
5021 }
5022
5023 StencilOpState& operator=( VkStencilOpState const & rhs )
5024 {
5025 memcpy( this, &rhs, sizeof(StencilOpState) );
5026 return *this;
5027 }
5028
5029 StencilOpState& setFailOp( StencilOp failOp_ )
5030 {
5031 failOp = failOp_;
5032 return *this;
5033 }
5034
5035 StencilOpState& setPassOp( StencilOp passOp_ )
5036 {
5037 passOp = passOp_;
5038 return *this;
5039 }
5040
5041 StencilOpState& setDepthFailOp( StencilOp depthFailOp_ )
5042 {
5043 depthFailOp = depthFailOp_;
5044 return *this;
5045 }
5046
5047 StencilOpState& setCompareOp( CompareOp compareOp_ )
5048 {
5049 compareOp = compareOp_;
5050 return *this;
5051 }
5052
5053 StencilOpState& setCompareMask( uint32_t compareMask_ )
5054 {
5055 compareMask = compareMask_;
5056 return *this;
5057 }
5058
5059 StencilOpState& setWriteMask( uint32_t writeMask_ )
5060 {
5061 writeMask = writeMask_;
5062 return *this;
5063 }
5064
5065 StencilOpState& setReference( uint32_t reference_ )
5066 {
5067 reference = reference_;
5068 return *this;
5069 }
5070
5071 operator const VkStencilOpState&() const
5072 {
5073 return *reinterpret_cast<const VkStencilOpState*>(this);
5074 }
5075
5076 bool operator==( StencilOpState const& rhs ) const
5077 {
5078 return ( failOp == rhs.failOp )
5079 && ( passOp == rhs.passOp )
5080 && ( depthFailOp == rhs.depthFailOp )
5081 && ( compareOp == rhs.compareOp )
5082 && ( compareMask == rhs.compareMask )
5083 && ( writeMask == rhs.writeMask )
5084 && ( reference == rhs.reference );
5085 }
5086
5087 bool operator!=( StencilOpState const& rhs ) const
5088 {
5089 return !operator==( rhs );
5090 }
5091
5092 StencilOp failOp;
5093 StencilOp passOp;
5094 StencilOp depthFailOp;
5095 CompareOp compareOp;
5096 uint32_t compareMask;
5097 uint32_t writeMask;
5098 uint32_t reference;
5099 };
5100 static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
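  // Illustrative sketch (editorial note): a stencil state that writes the reference value
  // wherever the fragment passes, using the constructor parameter order
  // failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference.
  //
  //   vk::StencilOpState markStencil( vk::StencilOp::eKeep, vk::StencilOp::eReplace, vk::StencilOp::eKeep,
  //                                   vk::CompareOp::eAlways, 0xFF, 0xFF, 1 );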
5101
5102 enum class LogicOp
5103 {
5104 eClear = VK_LOGIC_OP_CLEAR,
5105 eAnd = VK_LOGIC_OP_AND,
5106 eAndReverse = VK_LOGIC_OP_AND_REVERSE,
5107 eCopy = VK_LOGIC_OP_COPY,
5108 eAndInverted = VK_LOGIC_OP_AND_INVERTED,
5109 eNoOp = VK_LOGIC_OP_NO_OP,
5110 eXor = VK_LOGIC_OP_XOR,
5111 eOr = VK_LOGIC_OP_OR,
5112 eNor = VK_LOGIC_OP_NOR,
5113 eEquivalent = VK_LOGIC_OP_EQUIVALENT,
5114 eInvert = VK_LOGIC_OP_INVERT,
5115 eOrReverse = VK_LOGIC_OP_OR_REVERSE,
5116 eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
5117 eOrInverted = VK_LOGIC_OP_OR_INVERTED,
5118 eNand = VK_LOGIC_OP_NAND,
5119 eSet = VK_LOGIC_OP_SET
5120 };
5121
5122 enum class InternalAllocationType
5123 {
5124 eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
5125 };
5126
5127 enum class SystemAllocationScope
5128 {
5129 eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
5130 eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
5131 eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
5132 eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
5133 eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
5134 };
5135
5136 enum class PhysicalDeviceType
5137 {
5138 eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
5139 eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
5140 eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
5141 eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
5142 eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
5143 };
5144
5145 enum class VertexInputRate
5146 {
5147 eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
5148 eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
5149 };
5150
5151 struct VertexInputBindingDescription
5152 {
5153 VertexInputBindingDescription( uint32_t binding_ = 0, uint32_t stride_ = 0, VertexInputRate inputRate_ = VertexInputRate::eVertex )
5154 : binding( binding_ )
5155 , stride( stride_ )
5156 , inputRate( inputRate_ )
5157 {
5158 }
5159
5160 VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs )
5161 {
5162 memcpy( this, &rhs, sizeof(VertexInputBindingDescription) );
5163 }
5164
5165 VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs )
5166 {
5167 memcpy( this, &rhs, sizeof(VertexInputBindingDescription) );
5168 return *this;
5169 }
5170
5171 VertexInputBindingDescription& setBinding( uint32_t binding_ )
5172 {
5173 binding = binding_;
5174 return *this;
5175 }
5176
5177 VertexInputBindingDescription& setStride( uint32_t stride_ )
5178 {
5179 stride = stride_;
5180 return *this;
5181 }
5182
5183 VertexInputBindingDescription& setInputRate( VertexInputRate inputRate_ )
5184 {
5185 inputRate = inputRate_;
5186 return *this;
5187 }
5188
5189 operator const VkVertexInputBindingDescription&() const
5190 {
5191 return *reinterpret_cast<const VkVertexInputBindingDescription*>(this);
5192 }
5193
5194 bool operator==( VertexInputBindingDescription const& rhs ) const
5195 {
5196 return ( binding == rhs.binding )
5197 && ( stride == rhs.stride )
5198 && ( inputRate == rhs.inputRate );
5199 }
5200
5201 bool operator!=( VertexInputBindingDescription const& rhs ) const
5202 {
5203 return !operator==( rhs );
5204 }
5205
5206 uint32_t binding;
5207 uint32_t stride;
5208 VertexInputRate inputRate;
5209 };
5210 static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
5211
5212 enum class Format
5213 {
5214 eUndefined = VK_FORMAT_UNDEFINED,
5215 eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
5216 eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
5217 eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
5218 eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
5219 eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
5220 eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
5221 eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
5222 eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
5223 eR8Unorm = VK_FORMAT_R8_UNORM,
5224 eR8Snorm = VK_FORMAT_R8_SNORM,
5225 eR8Uscaled = VK_FORMAT_R8_USCALED,
5226 eR8Sscaled = VK_FORMAT_R8_SSCALED,
5227 eR8Uint = VK_FORMAT_R8_UINT,
5228 eR8Sint = VK_FORMAT_R8_SINT,
5229 eR8Srgb = VK_FORMAT_R8_SRGB,
5230 eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
5231 eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
5232 eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
5233 eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
5234 eR8G8Uint = VK_FORMAT_R8G8_UINT,
5235 eR8G8Sint = VK_FORMAT_R8G8_SINT,
5236 eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
5237 eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
5238 eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
5239 eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
5240 eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
5241 eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
5242 eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
5243 eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
5244 eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
5245 eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
5246 eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
5247 eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
5248 eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
5249 eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
5250 eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
5251 eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
5252 eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
5253 eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
5254 eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
5255 eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
5256 eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
5257 eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
5258 eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
5259 eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
5260 eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
5261 eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
5262 eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
5263 eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
5264 eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
5265 eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
5266 eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
5267 eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
5268 eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
5269 eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
5270 eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
5271 eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
5272 eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
5273 eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
5274 eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
5275 eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
5276 eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
5277 eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
5278 eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
5279 eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
5280 eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
5281 eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
5282 eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
5283 eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
5284 eR16Unorm = VK_FORMAT_R16_UNORM,
5285 eR16Snorm = VK_FORMAT_R16_SNORM,
5286 eR16Uscaled = VK_FORMAT_R16_USCALED,
5287 eR16Sscaled = VK_FORMAT_R16_SSCALED,
5288 eR16Uint = VK_FORMAT_R16_UINT,
5289 eR16Sint = VK_FORMAT_R16_SINT,
5290 eR16Sfloat = VK_FORMAT_R16_SFLOAT,
5291 eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
5292 eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
5293 eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
5294 eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
5295 eR16G16Uint = VK_FORMAT_R16G16_UINT,
5296 eR16G16Sint = VK_FORMAT_R16G16_SINT,
5297 eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
5298 eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
5299 eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
5300 eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
5301 eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
5302 eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
5303 eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
5304 eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
5305 eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
5306 eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
5307 eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
5308 eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
5309 eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
5310 eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
5311 eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
5312 eR32Uint = VK_FORMAT_R32_UINT,
5313 eR32Sint = VK_FORMAT_R32_SINT,
5314 eR32Sfloat = VK_FORMAT_R32_SFLOAT,
5315 eR32G32Uint = VK_FORMAT_R32G32_UINT,
5316 eR32G32Sint = VK_FORMAT_R32G32_SINT,
5317 eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
5318 eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
5319 eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
5320 eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
5321 eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
5322 eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
5323 eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
5324 eR64Uint = VK_FORMAT_R64_UINT,
5325 eR64Sint = VK_FORMAT_R64_SINT,
5326 eR64Sfloat = VK_FORMAT_R64_SFLOAT,
5327 eR64G64Uint = VK_FORMAT_R64G64_UINT,
5328 eR64G64Sint = VK_FORMAT_R64G64_SINT,
5329 eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
5330 eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
5331 eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
5332 eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
5333 eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
5334 eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
5335 eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
5336 eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
5337 eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
5338 eD16Unorm = VK_FORMAT_D16_UNORM,
5339 eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
5340 eD32Sfloat = VK_FORMAT_D32_SFLOAT,
5341 eS8Uint = VK_FORMAT_S8_UINT,
5342 eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
5343 eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
5344 eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
5345 eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
5346 eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
5347 eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
5348 eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
5349 eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
5350 eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
5351 eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
5352 eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
5353 eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
5354 eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
5355 eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
5356 eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
5357 eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
5358 eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
5359 eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
5360 eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
5361 eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
5362 eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
5363 eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
5364 eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
5365 eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
5366 eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
5367 eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
5368 eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
5369 eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
5370 eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
5371 eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
5372 eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
5373 eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
5374 eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
5375 eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
5376 eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
5377 eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
5378 eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
5379 eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
5380 eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
5381 eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
5382 eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
5383 eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
5384 eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
5385 eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
5386 eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
5387 eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
5388 eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
5389 eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
5390 eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
5391 eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
5392 eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
5393 eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
5394 eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
5395 eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
5396 eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
5397 eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
5398    eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
5399 ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
5400 ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
5401 ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
5402 ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
5403 ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
5404 ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
5405 ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
5406 ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG
5407  };
5408
5409 struct VertexInputAttributeDescription
5410 {
5411 VertexInputAttributeDescription( uint32_t location_ = 0, uint32_t binding_ = 0, Format format_ = Format::eUndefined, uint32_t offset_ = 0 )
5412 : location( location_ )
5413 , binding( binding_ )
5414 , format( format_ )
5415 , offset( offset_ )
5416 {
5417 }
5418
5419 VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs )
5420 {
5421 memcpy( this, &rhs, sizeof(VertexInputAttributeDescription) );
5422 }
5423
5424 VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs )
5425 {
5426 memcpy( this, &rhs, sizeof(VertexInputAttributeDescription) );
5427 return *this;
5428 }
5429
5430 VertexInputAttributeDescription& setLocation( uint32_t location_ )
5431 {
5432 location = location_;
5433 return *this;
5434 }
5435
5436 VertexInputAttributeDescription& setBinding( uint32_t binding_ )
5437 {
5438 binding = binding_;
5439 return *this;
5440 }
5441
5442 VertexInputAttributeDescription& setFormat( Format format_ )
5443 {
5444 format = format_;
5445 return *this;
5446 }
5447
5448 VertexInputAttributeDescription& setOffset( uint32_t offset_ )
5449 {
5450 offset = offset_;
5451 return *this;
5452 }
5453
5454 operator const VkVertexInputAttributeDescription&() const
5455 {
5456 return *reinterpret_cast<const VkVertexInputAttributeDescription*>(this);
5457 }
5458
5459 bool operator==( VertexInputAttributeDescription const& rhs ) const
5460 {
5461 return ( location == rhs.location )
5462 && ( binding == rhs.binding )
5463 && ( format == rhs.format )
5464 && ( offset == rhs.offset );
5465 }
5466
5467 bool operator!=( VertexInputAttributeDescription const& rhs ) const
5468 {
5469 return !operator==( rhs );
5470 }
5471
5472 uint32_t location;
5473 uint32_t binding;
5474 Format format;
5475 uint32_t offset;
5476 };
5477 static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
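  // Illustrative sketch (editorial note): one vertex buffer binding and a matching position
  // attribute; `Vertex` and its `pos` member are assumed application-side types.
  //
  //   vk::VertexInputBindingDescription binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex );
  //   vk::VertexInputAttributeDescription position( 0 /*location*/, 0 /*binding*/,
  //                                                  vk::Format::eR32G32B32Sfloat, offsetof( Vertex, pos ) );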
5478
5479 enum class StructureType
5480 {
5481 eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
5482 eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
5483 eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
5484 eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
5485 eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
5486 eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
5487 eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
5488 eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
5489 eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
5490 eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
5491 eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
5492 eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
5493 eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
5494 eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
5495 eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
5496 eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
5497 eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
5498 ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
5499 ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
5500 ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
5501 ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
5502 ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
5503 ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
5504 ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
5505 ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
5506 ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
5507 ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
5508 ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
5509 eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
5510 eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
5511 ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
5512 eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
5513 eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
5514 eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5515 eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5516 eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5517 eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
5518 eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
5519 eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
5520 eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
5521 eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
5522 eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
5523 eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
5524 eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
5525 eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
5526 eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
5527 eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
5528 eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
5529 eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
5530 eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
5531 ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
5532 eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
5533 eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
5534 eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
5535 eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
5536 eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
5537 eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
5538 eMirSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR,
5539 eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
5540 eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
5541 eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
5542 ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
5543 eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
5544 eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
5545 eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
5546 eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
5547 eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
5548 eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
5549 eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
5550 eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
5551 eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
5552 eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
5553 eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
5554 ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
5555 ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
5556 eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
5557 eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
5558 ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
5559 eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
5560 ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
5561 eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
5562 ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
5563 eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
5564 eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
5565 eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
5566 eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
5567 eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
5568 eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
5569 eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
5570 eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
5571 eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT,
5572 eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
5573 eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
5574 eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
5575 eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT
5576 };
5577
5578 struct ApplicationInfo
5579 {
5580 ApplicationInfo( const char* pApplicationName_ = nullptr, uint32_t applicationVersion_ = 0, const char* pEngineName_ = nullptr, uint32_t engineVersion_ = 0, uint32_t apiVersion_ = 0 )
5581 : sType( StructureType::eApplicationInfo )
5582 , pNext( nullptr )
5583 , pApplicationName( pApplicationName_ )
5584 , applicationVersion( applicationVersion_ )
5585 , pEngineName( pEngineName_ )
5586 , engineVersion( engineVersion_ )
5587 , apiVersion( apiVersion_ )
5588 {
5589 }
5590
5591 ApplicationInfo( VkApplicationInfo const & rhs )
5592 {
5593 memcpy( this, &rhs, sizeof(ApplicationInfo) );
5594 }
5595
5596 ApplicationInfo& operator=( VkApplicationInfo const & rhs )
5597 {
5598 memcpy( this, &rhs, sizeof(ApplicationInfo) );
5599 return *this;
5600 }
5601
5602 ApplicationInfo& setSType( StructureType sType_ )
5603 {
5604 sType = sType_;
5605 return *this;
5606 }
5607
5608 ApplicationInfo& setPNext( const void* pNext_ )
5609 {
5610 pNext = pNext_;
5611 return *this;
5612 }
5613
5614 ApplicationInfo& setPApplicationName( const char* pApplicationName_ )
5615 {
5616 pApplicationName = pApplicationName_;
5617 return *this;
5618 }
5619
5620 ApplicationInfo& setApplicationVersion( uint32_t applicationVersion_ )
5621 {
5622 applicationVersion = applicationVersion_;
5623 return *this;
5624 }
5625
5626 ApplicationInfo& setPEngineName( const char* pEngineName_ )
5627 {
5628 pEngineName = pEngineName_;
5629 return *this;
5630 }
5631
5632 ApplicationInfo& setEngineVersion( uint32_t engineVersion_ )
5633 {
5634 engineVersion = engineVersion_;
5635 return *this;
5636 }
5637
5638 ApplicationInfo& setApiVersion( uint32_t apiVersion_ )
5639 {
5640 apiVersion = apiVersion_;
5641 return *this;
5642 }
5643
5644 operator const VkApplicationInfo&() const
5645 {
5646 return *reinterpret_cast<const VkApplicationInfo*>(this);
5647 }
5648
5649 bool operator==( ApplicationInfo const& rhs ) const
5650 {
5651 return ( sType == rhs.sType )
5652 && ( pNext == rhs.pNext )
5653 && ( pApplicationName == rhs.pApplicationName )
5654 && ( applicationVersion == rhs.applicationVersion )
5655 && ( pEngineName == rhs.pEngineName )
5656 && ( engineVersion == rhs.engineVersion )
5657 && ( apiVersion == rhs.apiVersion );
5658 }
5659
5660 bool operator!=( ApplicationInfo const& rhs ) const
5661 {
5662 return !operator==( rhs );
5663 }
5664
5665 private:
5666 StructureType sType;
5667
5668 public:
5669 const void* pNext;
5670 const char* pApplicationName;
5671 uint32_t applicationVersion;
5672 const char* pEngineName;
5673 uint32_t engineVersion;
5674 uint32_t apiVersion;
5675 };
5676 static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
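
  // Illustrative usage sketch (not part of the generated interface): the
  // setters above return *this, so an ApplicationInfo can be built as one
  // chained expression and handed to the plain C API through the
  // VkApplicationInfo conversion operator. The name "appInfo" and the version
  // values are example choices, nothing mandated by the API.
  //
  //   vk::ApplicationInfo appInfo = vk::ApplicationInfo()
  //       .setPApplicationName( "ExampleApp" )
  //       .setApplicationVersion( 1 )
  //       .setPEngineName( "ExampleEngine" )
  //       .setEngineVersion( 1 )
  //       .setApiVersion( VK_MAKE_VERSION( 1, 0, 0 ) );
  //   const VkApplicationInfo& cAppInfo = appInfo;   // same size and layout, no copy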
5677
5678 struct DeviceQueueCreateInfo
5679 {
5680 DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), uint32_t queueFamilyIndex_ = 0, uint32_t queueCount_ = 0, const float* pQueuePriorities_ = nullptr )
5681 : sType( StructureType::eDeviceQueueCreateInfo )
5682 , pNext( nullptr )
5683 , flags( flags_ )
5684 , queueFamilyIndex( queueFamilyIndex_ )
5685 , queueCount( queueCount_ )
5686 , pQueuePriorities( pQueuePriorities_ )
5687 {
5688 }
5689
5690 DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs )
5691 {
5692 memcpy( this, &rhs, sizeof(DeviceQueueCreateInfo) );
5693 }
5694
5695 DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs )
5696 {
5697 memcpy( this, &rhs, sizeof(DeviceQueueCreateInfo) );
5698 return *this;
5699 }
5700
5701 DeviceQueueCreateInfo& setSType( StructureType sType_ )
5702 {
5703 sType = sType_;
5704 return *this;
5705 }
5706
5707 DeviceQueueCreateInfo& setPNext( const void* pNext_ )
5708 {
5709 pNext = pNext_;
5710 return *this;
5711 }
5712
5713 DeviceQueueCreateInfo& setFlags( DeviceQueueCreateFlags flags_ )
5714 {
5715 flags = flags_;
5716 return *this;
5717 }
5718
5719 DeviceQueueCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
5720 {
5721 queueFamilyIndex = queueFamilyIndex_;
5722 return *this;
5723 }
5724
5725 DeviceQueueCreateInfo& setQueueCount( uint32_t queueCount_ )
5726 {
5727 queueCount = queueCount_;
5728 return *this;
5729 }
5730
5731 DeviceQueueCreateInfo& setPQueuePriorities( const float* pQueuePriorities_ )
5732 {
5733 pQueuePriorities = pQueuePriorities_;
5734 return *this;
5735 }
5736
5737 operator const VkDeviceQueueCreateInfo&() const
5738 {
5739 return *reinterpret_cast<const VkDeviceQueueCreateInfo*>(this);
5740 }
5741
5742 bool operator==( DeviceQueueCreateInfo const& rhs ) const
5743 {
5744 return ( sType == rhs.sType )
5745 && ( pNext == rhs.pNext )
5746 && ( flags == rhs.flags )
5747 && ( queueFamilyIndex == rhs.queueFamilyIndex )
5748 && ( queueCount == rhs.queueCount )
5749 && ( pQueuePriorities == rhs.pQueuePriorities );
5750 }
5751
5752 bool operator!=( DeviceQueueCreateInfo const& rhs ) const
5753 {
5754 return !operator==( rhs );
5755 }
5756
5757 private:
5758 StructureType sType;
5759
5760 public:
5761 const void* pNext;
5762 DeviceQueueCreateFlags flags;
5763 uint32_t queueFamilyIndex;
5764 uint32_t queueCount;
5765 const float* pQueuePriorities;
5766 };
5767 static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
5768
5769 struct DeviceCreateInfo
5770 {
5771 DeviceCreateInfo( DeviceCreateFlags flags_ = DeviceCreateFlags(), uint32_t queueCreateInfoCount_ = 0, const DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr, const PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr )
5772 : sType( StructureType::eDeviceCreateInfo )
5773 , pNext( nullptr )
5774 , flags( flags_ )
5775 , queueCreateInfoCount( queueCreateInfoCount_ )
5776 , pQueueCreateInfos( pQueueCreateInfos_ )
5777 , enabledLayerCount( enabledLayerCount_ )
5778 , ppEnabledLayerNames( ppEnabledLayerNames_ )
5779 , enabledExtensionCount( enabledExtensionCount_ )
5780 , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
5781 , pEnabledFeatures( pEnabledFeatures_ )
5782 {
5783 }
5784
5785 DeviceCreateInfo( VkDeviceCreateInfo const & rhs )
5786 {
5787 memcpy( this, &rhs, sizeof(DeviceCreateInfo) );
5788 }
5789
5790 DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs )
5791 {
5792 memcpy( this, &rhs, sizeof(DeviceCreateInfo) );
5793 return *this;
5794 }
5795
5796 DeviceCreateInfo& setSType( StructureType sType_ )
5797 {
5798 sType = sType_;
5799 return *this;
5800 }
5801
5802 DeviceCreateInfo& setPNext( const void* pNext_ )
5803 {
5804 pNext = pNext_;
5805 return *this;
5806 }
5807
5808 DeviceCreateInfo& setFlags( DeviceCreateFlags flags_ )
5809 {
5810 flags = flags_;
5811 return *this;
5812 }
5813
5814 DeviceCreateInfo& setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ )
5815 {
5816 queueCreateInfoCount = queueCreateInfoCount_;
5817 return *this;
5818 }
5819
5820 DeviceCreateInfo& setPQueueCreateInfos( const DeviceQueueCreateInfo* pQueueCreateInfos_ )
5821 {
5822 pQueueCreateInfos = pQueueCreateInfos_;
5823 return *this;
5824 }
5825
5826 DeviceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
5827 {
5828 enabledLayerCount = enabledLayerCount_;
5829 return *this;
5830 }
5831
5832 DeviceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
5833 {
5834 ppEnabledLayerNames = ppEnabledLayerNames_;
5835 return *this;
5836 }
5837
5838 DeviceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
5839 {
5840 enabledExtensionCount = enabledExtensionCount_;
5841 return *this;
5842 }
5843
5844 DeviceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
5845 {
5846 ppEnabledExtensionNames = ppEnabledExtensionNames_;
5847 return *this;
5848 }
5849
5850 DeviceCreateInfo& setPEnabledFeatures( const PhysicalDeviceFeatures* pEnabledFeatures_ )
5851 {
5852 pEnabledFeatures = pEnabledFeatures_;
5853 return *this;
5854 }
5855
5856 operator const VkDeviceCreateInfo&() const
5857 {
5858 return *reinterpret_cast<const VkDeviceCreateInfo*>(this);
5859 }
5860
5861 bool operator==( DeviceCreateInfo const& rhs ) const
5862 {
5863 return ( sType == rhs.sType )
5864 && ( pNext == rhs.pNext )
5865 && ( flags == rhs.flags )
5866 && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
5867 && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
5868 && ( enabledLayerCount == rhs.enabledLayerCount )
5869 && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
5870 && ( enabledExtensionCount == rhs.enabledExtensionCount )
5871 && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
5872 && ( pEnabledFeatures == rhs.pEnabledFeatures );
5873 }
5874
5875 bool operator!=( DeviceCreateInfo const& rhs ) const
5876 {
5877 return !operator==( rhs );
5878 }
5879
5880 private:
5881 StructureType sType;
5882
5883 public:
5884 const void* pNext;
5885 DeviceCreateFlags flags;
5886 uint32_t queueCreateInfoCount;
5887 const DeviceQueueCreateInfo* pQueueCreateInfos;
5888 uint32_t enabledLayerCount;
5889 const char* const* ppEnabledLayerNames;
5890 uint32_t enabledExtensionCount;
5891 const char* const* ppEnabledExtensionNames;
5892 const PhysicalDeviceFeatures* pEnabledFeatures;
5893 };
5894 static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
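
  // Illustrative usage sketch, assuming a VkPhysicalDevice named
  // "physicalDevice" and a previously selected queue family index
  // "graphicsQueueFamilyIndex" (both hypothetical names): one queue is
  // requested and the DeviceCreateInfo is passed to the C entry point via the
  // conversion operator defined above.
  //
  //   float priority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueInfo = vk::DeviceQueueCreateInfo()
  //       .setQueueFamilyIndex( graphicsQueueFamilyIndex )
  //       .setQueueCount( 1 )
  //       .setPQueuePriorities( &priority );
  //   vk::DeviceCreateInfo deviceInfo = vk::DeviceCreateInfo()
  //       .setQueueCreateInfoCount( 1 )
  //       .setPQueueCreateInfos( &queueInfo );
  //   const VkDeviceCreateInfo& cDeviceInfo = deviceInfo;
  //   VkDevice device = VK_NULL_HANDLE;
  //   vkCreateDevice( physicalDevice, &cDeviceInfo, nullptr, &device );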
5895
5896 struct InstanceCreateInfo
5897 {
5898 InstanceCreateInfo( InstanceCreateFlags flags_ = InstanceCreateFlags(), const ApplicationInfo* pApplicationInfo_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr )
5899 : sType( StructureType::eInstanceCreateInfo )
5900 , pNext( nullptr )
5901 , flags( flags_ )
5902 , pApplicationInfo( pApplicationInfo_ )
5903 , enabledLayerCount( enabledLayerCount_ )
5904 , ppEnabledLayerNames( ppEnabledLayerNames_ )
5905 , enabledExtensionCount( enabledExtensionCount_ )
5906 , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
5907 {
5908 }
5909
5910 InstanceCreateInfo( VkInstanceCreateInfo const & rhs )
5911 {
5912 memcpy( this, &rhs, sizeof(InstanceCreateInfo) );
5913 }
5914
5915 InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs )
5916 {
5917 memcpy( this, &rhs, sizeof(InstanceCreateInfo) );
5918 return *this;
5919 }
5920
5921 InstanceCreateInfo& setSType( StructureType sType_ )
5922 {
5923 sType = sType_;
5924 return *this;
5925 }
5926
5927 InstanceCreateInfo& setPNext( const void* pNext_ )
5928 {
5929 pNext = pNext_;
5930 return *this;
5931 }
5932
5933 InstanceCreateInfo& setFlags( InstanceCreateFlags flags_ )
5934 {
5935 flags = flags_;
5936 return *this;
5937 }
5938
5939 InstanceCreateInfo& setPApplicationInfo( const ApplicationInfo* pApplicationInfo_ )
5940 {
5941 pApplicationInfo = pApplicationInfo_;
5942 return *this;
5943 }
5944
5945 InstanceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
5946 {
5947 enabledLayerCount = enabledLayerCount_;
5948 return *this;
5949 }
5950
5951 InstanceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
5952 {
5953 ppEnabledLayerNames = ppEnabledLayerNames_;
5954 return *this;
5955 }
5956
5957 InstanceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
5958 {
5959 enabledExtensionCount = enabledExtensionCount_;
5960 return *this;
5961 }
5962
5963 InstanceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
5964 {
5965 ppEnabledExtensionNames = ppEnabledExtensionNames_;
5966 return *this;
5967 }
5968
5969 operator const VkInstanceCreateInfo&() const
5970 {
5971 return *reinterpret_cast<const VkInstanceCreateInfo*>(this);
5972 }
5973
5974 bool operator==( InstanceCreateInfo const& rhs ) const
5975 {
5976 return ( sType == rhs.sType )
5977 && ( pNext == rhs.pNext )
5978 && ( flags == rhs.flags )
5979 && ( pApplicationInfo == rhs.pApplicationInfo )
5980 && ( enabledLayerCount == rhs.enabledLayerCount )
5981 && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
5982 && ( enabledExtensionCount == rhs.enabledExtensionCount )
5983 && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
5984 }
5985
5986 bool operator!=( InstanceCreateInfo const& rhs ) const
5987 {
5988 return !operator==( rhs );
5989 }
5990
5991 private:
5992 StructureType sType;
5993
5994 public:
5995 const void* pNext;
5996 InstanceCreateFlags flags;
5997 const ApplicationInfo* pApplicationInfo;
5998 uint32_t enabledLayerCount;
5999 const char* const* ppEnabledLayerNames;
6000 uint32_t enabledExtensionCount;
6001 const char* const* ppEnabledExtensionNames;
6002 };
6003 static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
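
  // Illustrative usage sketch: an InstanceCreateInfo pointing at an
  // ApplicationInfo (see the sketch after that struct) and enabling one
  // extension. The extension list is an example; real code should query the
  // available extensions before enabling any.
  //
  //   const char* instanceExtensions[] = { VK_KHR_SURFACE_EXTENSION_NAME };
  //   vk::InstanceCreateInfo instanceInfo = vk::InstanceCreateInfo()
  //       .setPApplicationInfo( &appInfo )
  //       .setEnabledExtensionCount( 1 )
  //       .setPpEnabledExtensionNames( instanceExtensions );
  //   const VkInstanceCreateInfo& cInstanceInfo = instanceInfo;
  //   VkInstance instance = VK_NULL_HANDLE;
  //   vkCreateInstance( &cInstanceInfo, nullptr, &instance );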
6004
6005 struct MemoryAllocateInfo
6006 {
6007 MemoryAllocateInfo( DeviceSize allocationSize_ = 0, uint32_t memoryTypeIndex_ = 0 )
6008 : sType( StructureType::eMemoryAllocateInfo )
6009 , pNext( nullptr )
6010 , allocationSize( allocationSize_ )
6011 , memoryTypeIndex( memoryTypeIndex_ )
6012 {
6013 }
6014
6015 MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs )
6016 {
6017 memcpy( this, &rhs, sizeof(MemoryAllocateInfo) );
6018 }
6019
6020 MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs )
6021 {
6022 memcpy( this, &rhs, sizeof(MemoryAllocateInfo) );
6023 return *this;
6024 }
6025
6026 MemoryAllocateInfo& setSType( StructureType sType_ )
6027 {
6028 sType = sType_;
6029 return *this;
6030 }
6031
6032 MemoryAllocateInfo& setPNext( const void* pNext_ )
6033 {
6034 pNext = pNext_;
6035 return *this;
6036 }
6037
6038 MemoryAllocateInfo& setAllocationSize( DeviceSize allocationSize_ )
6039 {
6040 allocationSize = allocationSize_;
6041 return *this;
6042 }
6043
6044 MemoryAllocateInfo& setMemoryTypeIndex( uint32_t memoryTypeIndex_ )
6045 {
6046 memoryTypeIndex = memoryTypeIndex_;
6047 return *this;
6048 }
6049
6050 operator const VkMemoryAllocateInfo&() const
6051 {
6052 return *reinterpret_cast<const VkMemoryAllocateInfo*>(this);
6053 }
6054
6055 bool operator==( MemoryAllocateInfo const& rhs ) const
6056 {
6057 return ( sType == rhs.sType )
6058 && ( pNext == rhs.pNext )
6059 && ( allocationSize == rhs.allocationSize )
6060 && ( memoryTypeIndex == rhs.memoryTypeIndex );
6061 }
6062
6063 bool operator!=( MemoryAllocateInfo const& rhs ) const
6064 {
6065 return !operator==( rhs );
6066 }
6067
6068 private:
6069 StructureType sType;
6070
6071 public:
6072 const void* pNext;
6073 DeviceSize allocationSize;
6074 uint32_t memoryTypeIndex;
6075 };
6076 static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
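
  // Illustrative usage sketch, assuming "device" is a VkDevice and
  // "memoryTypeIndex" was chosen from VkPhysicalDeviceMemoryProperties to
  // satisfy the resource's VkMemoryRequirements. allocationSize is in bytes.
  //
  //   vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
  //       .setAllocationSize( 64 * 1024 )
  //       .setMemoryTypeIndex( memoryTypeIndex );
  //   const VkMemoryAllocateInfo& cAllocInfo = allocInfo;
  //   VkDeviceMemory deviceMemory = VK_NULL_HANDLE;
  //   vkAllocateMemory( device, &cAllocInfo, nullptr, &deviceMemory );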
6077
6078 struct MappedMemoryRange
6079 {
6080 MappedMemoryRange( DeviceMemory memory_ = DeviceMemory(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
6081 : sType( StructureType::eMappedMemoryRange )
6082 , pNext( nullptr )
6083 , memory( memory_ )
6084 , offset( offset_ )
6085 , size( size_ )
6086 {
6087 }
6088
6089 MappedMemoryRange( VkMappedMemoryRange const & rhs )
6090 {
6091 memcpy( this, &rhs, sizeof(MappedMemoryRange) );
6092 }
6093
6094 MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs )
6095 {
6096 memcpy( this, &rhs, sizeof(MappedMemoryRange) );
6097 return *this;
6098 }
6099
6100 MappedMemoryRange& setSType( StructureType sType_ )
6101 {
6102 sType = sType_;
6103 return *this;
6104 }
6105
6106 MappedMemoryRange& setPNext( const void* pNext_ )
6107 {
6108 pNext = pNext_;
6109 return *this;
6110 }
6111
6112 MappedMemoryRange& setMemory( DeviceMemory memory_ )
6113 {
6114 memory = memory_;
6115 return *this;
6116 }
6117
6118 MappedMemoryRange& setOffset( DeviceSize offset_ )
6119 {
6120 offset = offset_;
6121 return *this;
6122 }
6123
6124 MappedMemoryRange& setSize( DeviceSize size_ )
6125 {
6126 size = size_;
6127 return *this;
6128 }
6129
6130 operator const VkMappedMemoryRange&() const
6131 {
6132 return *reinterpret_cast<const VkMappedMemoryRange*>(this);
6133 }
6134
6135 bool operator==( MappedMemoryRange const& rhs ) const
6136 {
6137 return ( sType == rhs.sType )
6138 && ( pNext == rhs.pNext )
6139 && ( memory == rhs.memory )
6140 && ( offset == rhs.offset )
6141 && ( size == rhs.size );
6142 }
6143
6144 bool operator!=( MappedMemoryRange const& rhs ) const
6145 {
6146 return !operator==( rhs );
6147 }
6148
6149 private:
6150 StructureType sType;
6151
6152 public:
6153 const void* pNext;
6154 DeviceMemory memory;
6155 DeviceSize offset;
6156 DeviceSize size;
6157 };
6158 static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
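
  // Illustrative usage sketch: flushing host writes to a non-coherent mapped
  // allocation. "device" (VkDevice) and "memory" (vk::DeviceMemory) are
  // assumed to exist; VK_WHOLE_SIZE covers everything from offset to the end
  // of the allocation.
  //
  //   vk::MappedMemoryRange range = vk::MappedMemoryRange()
  //       .setMemory( memory )
  //       .setOffset( 0 )
  //       .setSize( VK_WHOLE_SIZE );
  //   const VkMappedMemoryRange& cRange = range;
  //   vkFlushMappedMemoryRanges( device, 1, &cRange );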
6159
6160 struct WriteDescriptorSet
6161 {
6162 WriteDescriptorSet( DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, const DescriptorImageInfo* pImageInfo_ = nullptr, const DescriptorBufferInfo* pBufferInfo_ = nullptr, const BufferView* pTexelBufferView_ = nullptr )
6163 : sType( StructureType::eWriteDescriptorSet )
6164 , pNext( nullptr )
6165 , dstSet( dstSet_ )
6166 , dstBinding( dstBinding_ )
6167 , dstArrayElement( dstArrayElement_ )
6168 , descriptorCount( descriptorCount_ )
6169 , descriptorType( descriptorType_ )
6170 , pImageInfo( pImageInfo_ )
6171 , pBufferInfo( pBufferInfo_ )
6172 , pTexelBufferView( pTexelBufferView_ )
6173 {
6174 }
6175
6176 WriteDescriptorSet( VkWriteDescriptorSet const & rhs )
6177 {
6178 memcpy( this, &rhs, sizeof(WriteDescriptorSet) );
6179 }
6180
6181 WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs )
6182 {
6183 memcpy( this, &rhs, sizeof(WriteDescriptorSet) );
6184 return *this;
6185 }
6186
6187 WriteDescriptorSet& setSType( StructureType sType_ )
6188 {
6189 sType = sType_;
6190 return *this;
6191 }
6192
6193 WriteDescriptorSet& setPNext( const void* pNext_ )
6194 {
6195 pNext = pNext_;
6196 return *this;
6197 }
6198
6199 WriteDescriptorSet& setDstSet( DescriptorSet dstSet_ )
6200 {
6201 dstSet = dstSet_;
6202 return *this;
6203 }
6204
6205 WriteDescriptorSet& setDstBinding( uint32_t dstBinding_ )
6206 {
6207 dstBinding = dstBinding_;
6208 return *this;
6209 }
6210
6211 WriteDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
6212 {
6213 dstArrayElement = dstArrayElement_;
6214 return *this;
6215 }
6216
6217 WriteDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
6218 {
6219 descriptorCount = descriptorCount_;
6220 return *this;
6221 }
6222
6223 WriteDescriptorSet& setDescriptorType( DescriptorType descriptorType_ )
6224 {
6225 descriptorType = descriptorType_;
6226 return *this;
6227 }
6228
6229 WriteDescriptorSet& setPImageInfo( const DescriptorImageInfo* pImageInfo_ )
6230 {
6231 pImageInfo = pImageInfo_;
6232 return *this;
6233 }
6234
6235 WriteDescriptorSet& setPBufferInfo( const DescriptorBufferInfo* pBufferInfo_ )
6236 {
6237 pBufferInfo = pBufferInfo_;
6238 return *this;
6239 }
6240
6241 WriteDescriptorSet& setPTexelBufferView( const BufferView* pTexelBufferView_ )
6242 {
6243 pTexelBufferView = pTexelBufferView_;
6244 return *this;
6245 }
6246
6247 operator const VkWriteDescriptorSet&() const
6248 {
6249 return *reinterpret_cast<const VkWriteDescriptorSet*>(this);
6250 }
6251
6252 bool operator==( WriteDescriptorSet const& rhs ) const
6253 {
6254 return ( sType == rhs.sType )
6255 && ( pNext == rhs.pNext )
6256 && ( dstSet == rhs.dstSet )
6257 && ( dstBinding == rhs.dstBinding )
6258 && ( dstArrayElement == rhs.dstArrayElement )
6259 && ( descriptorCount == rhs.descriptorCount )
6260 && ( descriptorType == rhs.descriptorType )
6261 && ( pImageInfo == rhs.pImageInfo )
6262 && ( pBufferInfo == rhs.pBufferInfo )
6263 && ( pTexelBufferView == rhs.pTexelBufferView );
6264 }
6265
6266 bool operator!=( WriteDescriptorSet const& rhs ) const
6267 {
6268 return !operator==( rhs );
6269 }
6270
6271 private:
6272 StructureType sType;
6273
6274 public:
6275 const void* pNext;
6276 DescriptorSet dstSet;
6277 uint32_t dstBinding;
6278 uint32_t dstArrayElement;
6279 uint32_t descriptorCount;
6280 DescriptorType descriptorType;
6281 const DescriptorImageInfo* pImageInfo;
6282 const DescriptorBufferInfo* pBufferInfo;
6283 const BufferView* pTexelBufferView;
6284 };
6285 static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
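
  // Illustrative usage sketch: writing a single uniform-buffer descriptor.
  // "device" (VkDevice), "descriptorSet" (vk::DescriptorSet) and "bufferInfo"
  // (vk::DescriptorBufferInfo) are assumed to exist. Only the pointer that
  // matches descriptorType is read; the other two may stay null.
  //
  //   vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
  //       .setDstSet( descriptorSet )
  //       .setDstBinding( 0 )
  //       .setDescriptorCount( 1 )
  //       .setDescriptorType( vk::DescriptorType::eUniformBuffer )
  //       .setPBufferInfo( &bufferInfo );
  //   const VkWriteDescriptorSet& cWrite = write;
  //   vkUpdateDescriptorSets( device, 1, &cWrite, 0, nullptr );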
6286
6287 struct CopyDescriptorSet
6288 {
6289 CopyDescriptorSet( DescriptorSet srcSet_ = DescriptorSet(), uint32_t srcBinding_ = 0, uint32_t srcArrayElement_ = 0, DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0 )
6290 : sType( StructureType::eCopyDescriptorSet )
6291 , pNext( nullptr )
6292 , srcSet( srcSet_ )
6293 , srcBinding( srcBinding_ )
6294 , srcArrayElement( srcArrayElement_ )
6295 , dstSet( dstSet_ )
6296 , dstBinding( dstBinding_ )
6297 , dstArrayElement( dstArrayElement_ )
6298 , descriptorCount( descriptorCount_ )
6299 {
6300 }
6301
6302 CopyDescriptorSet( VkCopyDescriptorSet const & rhs )
6303 {
6304 memcpy( this, &rhs, sizeof(CopyDescriptorSet) );
6305 }
6306
6307 CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs )
6308 {
6309 memcpy( this, &rhs, sizeof(CopyDescriptorSet) );
6310 return *this;
6311 }
6312
6313 CopyDescriptorSet& setSType( StructureType sType_ )
6314 {
6315 sType = sType_;
6316 return *this;
6317 }
6318
6319 CopyDescriptorSet& setPNext( const void* pNext_ )
6320 {
6321 pNext = pNext_;
6322 return *this;
6323 }
6324
6325 CopyDescriptorSet& setSrcSet( DescriptorSet srcSet_ )
6326 {
6327 srcSet = srcSet_;
6328 return *this;
6329 }
6330
6331 CopyDescriptorSet& setSrcBinding( uint32_t srcBinding_ )
6332 {
6333 srcBinding = srcBinding_;
6334 return *this;
6335 }
6336
6337 CopyDescriptorSet& setSrcArrayElement( uint32_t srcArrayElement_ )
6338 {
6339 srcArrayElement = srcArrayElement_;
6340 return *this;
6341 }
6342
6343 CopyDescriptorSet& setDstSet( DescriptorSet dstSet_ )
6344 {
6345 dstSet = dstSet_;
6346 return *this;
6347 }
6348
6349 CopyDescriptorSet& setDstBinding( uint32_t dstBinding_ )
6350 {
6351 dstBinding = dstBinding_;
6352 return *this;
6353 }
6354
6355 CopyDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
6356 {
6357 dstArrayElement = dstArrayElement_;
6358 return *this;
6359 }
6360
6361 CopyDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
6362 {
6363 descriptorCount = descriptorCount_;
6364 return *this;
6365 }
6366
6367 operator const VkCopyDescriptorSet&() const
6368 {
6369 return *reinterpret_cast<const VkCopyDescriptorSet*>(this);
6370 }
6371
6372 bool operator==( CopyDescriptorSet const& rhs ) const
6373 {
6374 return ( sType == rhs.sType )
6375 && ( pNext == rhs.pNext )
6376 && ( srcSet == rhs.srcSet )
6377 && ( srcBinding == rhs.srcBinding )
6378 && ( srcArrayElement == rhs.srcArrayElement )
6379 && ( dstSet == rhs.dstSet )
6380 && ( dstBinding == rhs.dstBinding )
6381 && ( dstArrayElement == rhs.dstArrayElement )
6382 && ( descriptorCount == rhs.descriptorCount );
6383 }
6384
6385 bool operator!=( CopyDescriptorSet const& rhs ) const
6386 {
6387 return !operator==( rhs );
6388 }
6389
6390 private:
6391 StructureType sType;
6392
6393 public:
6394 const void* pNext;
6395 DescriptorSet srcSet;
6396 uint32_t srcBinding;
6397 uint32_t srcArrayElement;
6398 DescriptorSet dstSet;
6399 uint32_t dstBinding;
6400 uint32_t dstArrayElement;
6401 uint32_t descriptorCount;
6402 };
6403 static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
6404
6405 struct BufferViewCreateInfo
6406 {
6407 BufferViewCreateInfo( BufferViewCreateFlags flags_ = BufferViewCreateFlags(), Buffer buffer_ = Buffer(), Format format_ = Format::eUndefined, DeviceSize offset_ = 0, DeviceSize range_ = 0 )
6408 : sType( StructureType::eBufferViewCreateInfo )
6409 , pNext( nullptr )
6410 , flags( flags_ )
6411 , buffer( buffer_ )
6412 , format( format_ )
6413 , offset( offset_ )
6414 , range( range_ )
6415 {
6416 }
6417
6418 BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs )
6419 {
6420 memcpy( this, &rhs, sizeof(BufferViewCreateInfo) );
6421 }
6422
6423 BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs )
6424 {
6425 memcpy( this, &rhs, sizeof(BufferViewCreateInfo) );
6426 return *this;
6427 }
6428
6429 BufferViewCreateInfo& setSType( StructureType sType_ )
6430 {
6431 sType = sType_;
6432 return *this;
6433 }
6434
6435 BufferViewCreateInfo& setPNext( const void* pNext_ )
6436 {
6437 pNext = pNext_;
6438 return *this;
6439 }
6440
6441 BufferViewCreateInfo& setFlags( BufferViewCreateFlags flags_ )
6442 {
6443 flags = flags_;
6444 return *this;
6445 }
6446
6447 BufferViewCreateInfo& setBuffer( Buffer buffer_ )
6448 {
6449 buffer = buffer_;
6450 return *this;
6451 }
6452
6453 BufferViewCreateInfo& setFormat( Format format_ )
6454 {
6455 format = format_;
6456 return *this;
6457 }
6458
6459 BufferViewCreateInfo& setOffset( DeviceSize offset_ )
6460 {
6461 offset = offset_;
6462 return *this;
6463 }
6464
6465 BufferViewCreateInfo& setRange( DeviceSize range_ )
6466 {
6467 range = range_;
6468 return *this;
6469 }
6470
6471 operator const VkBufferViewCreateInfo&() const
6472 {
6473 return *reinterpret_cast<const VkBufferViewCreateInfo*>(this);
6474 }
6475
6476 bool operator==( BufferViewCreateInfo const& rhs ) const
6477 {
6478 return ( sType == rhs.sType )
6479 && ( pNext == rhs.pNext )
6480 && ( flags == rhs.flags )
6481 && ( buffer == rhs.buffer )
6482 && ( format == rhs.format )
6483 && ( offset == rhs.offset )
6484 && ( range == rhs.range );
6485 }
6486
6487 bool operator!=( BufferViewCreateInfo const& rhs ) const
6488 {
6489 return !operator==( rhs );
6490 }
6491
6492 private:
6493 StructureType sType;
6494
6495 public:
6496 const void* pNext;
6497 BufferViewCreateFlags flags;
6498 Buffer buffer;
6499 Format format;
6500 DeviceSize offset;
6501 DeviceSize range;
6502 };
6503 static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
6504
6505 struct ShaderModuleCreateInfo
6506 {
6507 ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_ = ShaderModuleCreateFlags(), size_t codeSize_ = 0, const uint32_t* pCode_ = nullptr )
6508 : sType( StructureType::eShaderModuleCreateInfo )
6509 , pNext( nullptr )
6510 , flags( flags_ )
6511 , codeSize( codeSize_ )
6512 , pCode( pCode_ )
6513 {
6514 }
6515
6516 ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs )
6517 {
6518 memcpy( this, &rhs, sizeof(ShaderModuleCreateInfo) );
6519 }
6520
6521 ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs )
6522 {
6523 memcpy( this, &rhs, sizeof(ShaderModuleCreateInfo) );
6524 return *this;
6525 }
6526
6527 ShaderModuleCreateInfo& setSType( StructureType sType_ )
6528 {
6529 sType = sType_;
6530 return *this;
6531 }
6532
6533 ShaderModuleCreateInfo& setPNext( const void* pNext_ )
6534 {
6535 pNext = pNext_;
6536 return *this;
6537 }
6538
6539 ShaderModuleCreateInfo& setFlags( ShaderModuleCreateFlags flags_ )
6540 {
6541 flags = flags_;
6542 return *this;
6543 }
6544
6545 ShaderModuleCreateInfo& setCodeSize( size_t codeSize_ )
6546 {
6547 codeSize = codeSize_;
6548 return *this;
6549 }
6550
6551 ShaderModuleCreateInfo& setPCode( const uint32_t* pCode_ )
6552 {
6553 pCode = pCode_;
6554 return *this;
6555 }
6556
6557 operator const VkShaderModuleCreateInfo&() const
6558 {
6559 return *reinterpret_cast<const VkShaderModuleCreateInfo*>(this);
6560 }
6561
6562 bool operator==( ShaderModuleCreateInfo const& rhs ) const
6563 {
6564 return ( sType == rhs.sType )
6565 && ( pNext == rhs.pNext )
6566 && ( flags == rhs.flags )
6567 && ( codeSize == rhs.codeSize )
6568 && ( pCode == rhs.pCode );
6569 }
6570
6571 bool operator!=( ShaderModuleCreateInfo const& rhs ) const
6572 {
6573 return !operator==( rhs );
6574 }
6575
6576 private:
6577 StructureType sType;
6578
6579 public:
6580 const void* pNext;
6581 ShaderModuleCreateFlags flags;
6582 size_t codeSize;
6583 const uint32_t* pCode;
6584 };
6585 static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
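
  // Illustrative usage sketch, assuming "spirv" is a std::vector<uint32_t> of
  // SPIR-V words and "device" is a VkDevice. Note that codeSize is given in
  // bytes, not in 32-bit words.
  //
  //   vk::ShaderModuleCreateInfo moduleInfo = vk::ShaderModuleCreateInfo()
  //       .setCodeSize( spirv.size() * sizeof( uint32_t ) )
  //       .setPCode( spirv.data() );
  //   const VkShaderModuleCreateInfo& cModuleInfo = moduleInfo;
  //   VkShaderModule shaderModule = VK_NULL_HANDLE;
  //   vkCreateShaderModule( device, &cModuleInfo, nullptr, &shaderModule );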
6586
6587 struct DescriptorSetAllocateInfo
6588 {
6589 DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = DescriptorPool(), uint32_t descriptorSetCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr )
6590 : sType( StructureType::eDescriptorSetAllocateInfo )
6591 , pNext( nullptr )
6592 , descriptorPool( descriptorPool_ )
6593 , descriptorSetCount( descriptorSetCount_ )
6594 , pSetLayouts( pSetLayouts_ )
6595 {
6596 }
6597
6598 DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs )
6599 {
6600 memcpy( this, &rhs, sizeof(DescriptorSetAllocateInfo) );
6601 }
6602
6603 DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs )
6604 {
6605 memcpy( this, &rhs, sizeof(DescriptorSetAllocateInfo) );
6606 return *this;
6607 }
6608
6609 DescriptorSetAllocateInfo& setSType( StructureType sType_ )
6610 {
6611 sType = sType_;
6612 return *this;
6613 }
6614
6615 DescriptorSetAllocateInfo& setPNext( const void* pNext_ )
6616 {
6617 pNext = pNext_;
6618 return *this;
6619 }
6620
6621 DescriptorSetAllocateInfo& setDescriptorPool( DescriptorPool descriptorPool_ )
6622 {
6623 descriptorPool = descriptorPool_;
6624 return *this;
6625 }
6626
6627 DescriptorSetAllocateInfo& setDescriptorSetCount( uint32_t descriptorSetCount_ )
6628 {
6629 descriptorSetCount = descriptorSetCount_;
6630 return *this;
6631 }
6632
6633 DescriptorSetAllocateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
6634 {
6635 pSetLayouts = pSetLayouts_;
6636 return *this;
6637 }
6638
6639 operator const VkDescriptorSetAllocateInfo&() const
6640 {
6641 return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>(this);
6642 }
6643
6644 bool operator==( DescriptorSetAllocateInfo const& rhs ) const
6645 {
6646 return ( sType == rhs.sType )
6647 && ( pNext == rhs.pNext )
6648 && ( descriptorPool == rhs.descriptorPool )
6649 && ( descriptorSetCount == rhs.descriptorSetCount )
6650 && ( pSetLayouts == rhs.pSetLayouts );
6651 }
6652
6653 bool operator!=( DescriptorSetAllocateInfo const& rhs ) const
6654 {
6655 return !operator==( rhs );
6656 }
6657
6658 private:
6659 StructureType sType;
6660
6661 public:
6662 const void* pNext;
6663 DescriptorPool descriptorPool;
6664 uint32_t descriptorSetCount;
6665 const DescriptorSetLayout* pSetLayouts;
6666 };
6667 static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
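
  // Illustrative usage sketch, assuming "device" (VkDevice), "descriptorPool"
  // (vk::DescriptorPool) and "setLayout" (vk::DescriptorSetLayout) already
  // exist. One descriptor set is allocated per entry in pSetLayouts, so
  // descriptorSetCount must match the length of that array.
  //
  //   vk::DescriptorSetAllocateInfo allocInfo = vk::DescriptorSetAllocateInfo()
  //       .setDescriptorPool( descriptorPool )
  //       .setDescriptorSetCount( 1 )
  //       .setPSetLayouts( &setLayout );
  //   const VkDescriptorSetAllocateInfo& cAllocInfo = allocInfo;
  //   VkDescriptorSet descriptorSet = VK_NULL_HANDLE;
  //   vkAllocateDescriptorSets( device, &cAllocInfo, &descriptorSet );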
6668
6669 struct PipelineVertexInputStateCreateInfo
6670 {
6671 PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = PipelineVertexInputStateCreateFlags(), uint32_t vertexBindingDescriptionCount_ = 0, const VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr, uint32_t vertexAttributeDescriptionCount_ = 0, const VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr )
6672 : sType( StructureType::ePipelineVertexInputStateCreateInfo )
6673 , pNext( nullptr )
6674 , flags( flags_ )
6675 , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
6676 , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
6677 , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
6678 , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
6679 {
6680 }
6681
6682 PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs )
6683 {
6684 memcpy( this, &rhs, sizeof(PipelineVertexInputStateCreateInfo) );
6685 }
6686
6687 PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs )
6688 {
6689 memcpy( this, &rhs, sizeof(PipelineVertexInputStateCreateInfo) );
6690 return *this;
6691 }
6692
6693 PipelineVertexInputStateCreateInfo& setSType( StructureType sType_ )
6694 {
6695 sType = sType_;
6696 return *this;
6697 }
6698
6699 PipelineVertexInputStateCreateInfo& setPNext( const void* pNext_ )
6700 {
6701 pNext = pNext_;
6702 return *this;
6703 }
6704
6705 PipelineVertexInputStateCreateInfo& setFlags( PipelineVertexInputStateCreateFlags flags_ )
6706 {
6707 flags = flags_;
6708 return *this;
6709 }
6710
6711 PipelineVertexInputStateCreateInfo& setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ )
6712 {
6713 vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
6714 return *this;
6715 }
6716
6717 PipelineVertexInputStateCreateInfo& setPVertexBindingDescriptions( const VertexInputBindingDescription* pVertexBindingDescriptions_ )
6718 {
6719 pVertexBindingDescriptions = pVertexBindingDescriptions_;
6720 return *this;
6721 }
6722
6723 PipelineVertexInputStateCreateInfo& setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ )
6724 {
6725 vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
6726 return *this;
6727 }
6728
6729 PipelineVertexInputStateCreateInfo& setPVertexAttributeDescriptions( const VertexInputAttributeDescription* pVertexAttributeDescriptions_ )
6730 {
6731 pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
6732 return *this;
6733 }
6734
6735 operator const VkPipelineVertexInputStateCreateInfo&() const
6736 {
6737 return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>(this);
6738 }
6739
6740 bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const
6741 {
6742 return ( sType == rhs.sType )
6743 && ( pNext == rhs.pNext )
6744 && ( flags == rhs.flags )
6745 && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
6746 && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
6747 && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
6748 && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
6749 }
6750
6751 bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const
6752 {
6753 return !operator==( rhs );
6754 }
6755
6756 private:
6757 StructureType sType;
6758
6759 public:
6760 const void* pNext;
6761 PipelineVertexInputStateCreateFlags flags;
6762 uint32_t vertexBindingDescriptionCount;
6763 const VertexInputBindingDescription* pVertexBindingDescriptions;
6764 uint32_t vertexAttributeDescriptionCount;
6765 const VertexInputAttributeDescription* pVertexAttributeDescriptions;
6766 };
6767 static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
6768
6769 struct PipelineInputAssemblyStateCreateInfo
6770 {
6771 PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = PipelineInputAssemblyStateCreateFlags(), PrimitiveTopology topology_ = PrimitiveTopology::ePointList, Bool32 primitiveRestartEnable_ = 0 )
6772 : sType( StructureType::ePipelineInputAssemblyStateCreateInfo )
6773 , pNext( nullptr )
6774 , flags( flags_ )
6775 , topology( topology_ )
6776 , primitiveRestartEnable( primitiveRestartEnable_ )
6777 {
6778 }
6779
6780 PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs )
6781 {
6782 memcpy( this, &rhs, sizeof(PipelineInputAssemblyStateCreateInfo) );
6783 }
6784
6785 PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs )
6786 {
6787 memcpy( this, &rhs, sizeof(PipelineInputAssemblyStateCreateInfo) );
6788 return *this;
6789 }
6790
6791 PipelineInputAssemblyStateCreateInfo& setSType( StructureType sType_ )
6792 {
6793 sType = sType_;
6794 return *this;
6795 }
6796
6797 PipelineInputAssemblyStateCreateInfo& setPNext( const void* pNext_ )
6798 {
6799 pNext = pNext_;
6800 return *this;
6801 }
6802
6803 PipelineInputAssemblyStateCreateInfo& setFlags( PipelineInputAssemblyStateCreateFlags flags_ )
6804 {
6805 flags = flags_;
6806 return *this;
6807 }
6808
6809 PipelineInputAssemblyStateCreateInfo& setTopology( PrimitiveTopology topology_ )
6810 {
6811 topology = topology_;
6812 return *this;
6813 }
6814
6815 PipelineInputAssemblyStateCreateInfo& setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ )
6816 {
6817 primitiveRestartEnable = primitiveRestartEnable_;
6818 return *this;
6819 }
6820
6821 operator const VkPipelineInputAssemblyStateCreateInfo&() const
6822 {
6823 return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>(this);
6824 }
6825
6826 bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const
6827 {
6828 return ( sType == rhs.sType )
6829 && ( pNext == rhs.pNext )
6830 && ( flags == rhs.flags )
6831 && ( topology == rhs.topology )
6832 && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
6833 }
6834
6835 bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const
6836 {
6837 return !operator==( rhs );
6838 }
6839
6840 private:
6841 StructureType sType;
6842
6843 public:
6844 const void* pNext;
6845 PipelineInputAssemblyStateCreateFlags flags;
6846 PrimitiveTopology topology;
6847 Bool32 primitiveRestartEnable;
6848 };
6849 static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
6850
6851 struct PipelineTessellationStateCreateInfo
6852 {
6853 PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = PipelineTessellationStateCreateFlags(), uint32_t patchControlPoints_ = 0 )
6854 : sType( StructureType::ePipelineTessellationStateCreateInfo )
6855 , pNext( nullptr )
6856 , flags( flags_ )
6857 , patchControlPoints( patchControlPoints_ )
6858 {
6859 }
6860
6861 PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs )
6862 {
6863 memcpy( this, &rhs, sizeof(PipelineTessellationStateCreateInfo) );
6864 }
6865
6866 PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs )
6867 {
6868 memcpy( this, &rhs, sizeof(PipelineTessellationStateCreateInfo) );
6869 return *this;
6870 }
6871
6872 PipelineTessellationStateCreateInfo& setSType( StructureType sType_ )
6873 {
6874 sType = sType_;
6875 return *this;
6876 }
6877
6878 PipelineTessellationStateCreateInfo& setPNext( const void* pNext_ )
6879 {
6880 pNext = pNext_;
6881 return *this;
6882 }
6883
6884 PipelineTessellationStateCreateInfo& setFlags( PipelineTessellationStateCreateFlags flags_ )
6885 {
6886 flags = flags_;
6887 return *this;
6888 }
6889
6890 PipelineTessellationStateCreateInfo& setPatchControlPoints( uint32_t patchControlPoints_ )
6891 {
6892 patchControlPoints = patchControlPoints_;
6893 return *this;
6894 }
6895
6896 operator const VkPipelineTessellationStateCreateInfo&() const
6897 {
6898 return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>(this);
6899 }
6900
6901 bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const
6902 {
6903 return ( sType == rhs.sType )
6904 && ( pNext == rhs.pNext )
6905 && ( flags == rhs.flags )
6906 && ( patchControlPoints == rhs.patchControlPoints );
6907 }
6908
6909 bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const
6910 {
6911 return !operator==( rhs );
6912 }
6913
6914 private:
6915 StructureType sType;
6916
6917 public:
6918 const void* pNext;
6919 PipelineTessellationStateCreateFlags flags;
6920 uint32_t patchControlPoints;
6921 };
6922 static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
6923
6924 struct PipelineViewportStateCreateInfo
6925 {
6926 PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = PipelineViewportStateCreateFlags(), uint32_t viewportCount_ = 0, const Viewport* pViewports_ = nullptr, uint32_t scissorCount_ = 0, const Rect2D* pScissors_ = nullptr )
6927 : sType( StructureType::ePipelineViewportStateCreateInfo )
6928 , pNext( nullptr )
6929 , flags( flags_ )
6930 , viewportCount( viewportCount_ )
6931 , pViewports( pViewports_ )
6932 , scissorCount( scissorCount_ )
6933 , pScissors( pScissors_ )
6934 {
6935 }
6936
6937 PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs )
6938 {
6939 memcpy( this, &rhs, sizeof(PipelineViewportStateCreateInfo) );
6940 }
6941
6942 PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs )
6943 {
6944 memcpy( this, &rhs, sizeof(PipelineViewportStateCreateInfo) );
6945 return *this;
6946 }
6947
6948 PipelineViewportStateCreateInfo& setSType( StructureType sType_ )
6949 {
6950 sType = sType_;
6951 return *this;
6952 }
6953
6954 PipelineViewportStateCreateInfo& setPNext( const void* pNext_ )
6955 {
6956 pNext = pNext_;
6957 return *this;
6958 }
6959
6960 PipelineViewportStateCreateInfo& setFlags( PipelineViewportStateCreateFlags flags_ )
6961 {
6962 flags = flags_;
6963 return *this;
6964 }
6965
6966 PipelineViewportStateCreateInfo& setViewportCount( uint32_t viewportCount_ )
6967 {
6968 viewportCount = viewportCount_;
6969 return *this;
6970 }
6971
6972 PipelineViewportStateCreateInfo& setPViewports( const Viewport* pViewports_ )
6973 {
6974 pViewports = pViewports_;
6975 return *this;
6976 }
6977
6978 PipelineViewportStateCreateInfo& setScissorCount( uint32_t scissorCount_ )
6979 {
6980 scissorCount = scissorCount_;
6981 return *this;
6982 }
6983
6984 PipelineViewportStateCreateInfo& setPScissors( const Rect2D* pScissors_ )
6985 {
6986 pScissors = pScissors_;
6987 return *this;
6988 }
6989
6990 operator const VkPipelineViewportStateCreateInfo&() const
6991 {
6992 return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>(this);
6993 }
6994
6995 bool operator==( PipelineViewportStateCreateInfo const& rhs ) const
6996 {
6997 return ( sType == rhs.sType )
6998 && ( pNext == rhs.pNext )
6999 && ( flags == rhs.flags )
7000 && ( viewportCount == rhs.viewportCount )
7001 && ( pViewports == rhs.pViewports )
7002 && ( scissorCount == rhs.scissorCount )
7003 && ( pScissors == rhs.pScissors );
7004 }
7005
7006 bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const
7007 {
7008 return !operator==( rhs );
7009 }
7010
7011 private:
7012 StructureType sType;
7013
7014 public:
7015 const void* pNext;
7016 PipelineViewportStateCreateFlags flags;
7017 uint32_t viewportCount;
7018 const Viewport* pViewports;
7019 uint32_t scissorCount;
7020 const Rect2D* pScissors;
7021 };
7022 static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
7023
7024 struct PipelineRasterizationStateCreateInfo
7025 {
7026 PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = PipelineRasterizationStateCreateFlags(), Bool32 depthClampEnable_ = 0, Bool32 rasterizerDiscardEnable_ = 0, PolygonMode polygonMode_ = PolygonMode::eFill, CullModeFlags cullMode_ = CullModeFlags(), FrontFace frontFace_ = FrontFace::eCounterClockwise, Bool32 depthBiasEnable_ = 0, float depthBiasConstantFactor_ = 0, float depthBiasClamp_ = 0, float depthBiasSlopeFactor_ = 0, float lineWidth_ = 0 )
7027 : sType( StructureType::ePipelineRasterizationStateCreateInfo )
7028 , pNext( nullptr )
7029 , flags( flags_ )
7030 , depthClampEnable( depthClampEnable_ )
7031 , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
7032 , polygonMode( polygonMode_ )
7033 , cullMode( cullMode_ )
7034 , frontFace( frontFace_ )
7035 , depthBiasEnable( depthBiasEnable_ )
7036 , depthBiasConstantFactor( depthBiasConstantFactor_ )
7037 , depthBiasClamp( depthBiasClamp_ )
7038 , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
7039 , lineWidth( lineWidth_ )
7040 {
7041 }
7042
7043 PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs )
7044 {
7045 memcpy( this, &rhs, sizeof(PipelineRasterizationStateCreateInfo) );
7046 }
7047
7048 PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs )
7049 {
7050 memcpy( this, &rhs, sizeof(PipelineRasterizationStateCreateInfo) );
7051 return *this;
7052 }
7053
7054 PipelineRasterizationStateCreateInfo& setSType( StructureType sType_ )
7055 {
7056 sType = sType_;
7057 return *this;
7058 }
7059
7060 PipelineRasterizationStateCreateInfo& setPNext( const void* pNext_ )
7061 {
7062 pNext = pNext_;
7063 return *this;
7064 }
7065
7066 PipelineRasterizationStateCreateInfo& setFlags( PipelineRasterizationStateCreateFlags flags_ )
7067 {
7068 flags = flags_;
7069 return *this;
7070 }
7071
7072 PipelineRasterizationStateCreateInfo& setDepthClampEnable( Bool32 depthClampEnable_ )
7073 {
7074 depthClampEnable = depthClampEnable_;
7075 return *this;
7076 }
7077
7078 PipelineRasterizationStateCreateInfo& setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ )
7079 {
7080 rasterizerDiscardEnable = rasterizerDiscardEnable_;
7081 return *this;
7082 }
7083
7084 PipelineRasterizationStateCreateInfo& setPolygonMode( PolygonMode polygonMode_ )
7085 {
7086 polygonMode = polygonMode_;
7087 return *this;
7088 }
7089
7090 PipelineRasterizationStateCreateInfo& setCullMode( CullModeFlags cullMode_ )
7091 {
7092 cullMode = cullMode_;
7093 return *this;
7094 }
7095
7096 PipelineRasterizationStateCreateInfo& setFrontFace( FrontFace frontFace_ )
7097 {
7098 frontFace = frontFace_;
7099 return *this;
7100 }
7101
7102 PipelineRasterizationStateCreateInfo& setDepthBiasEnable( Bool32 depthBiasEnable_ )
7103 {
7104 depthBiasEnable = depthBiasEnable_;
7105 return *this;
7106 }
7107
7108 PipelineRasterizationStateCreateInfo& setDepthBiasConstantFactor( float depthBiasConstantFactor_ )
7109 {
7110 depthBiasConstantFactor = depthBiasConstantFactor_;
7111 return *this;
7112 }
7113
7114 PipelineRasterizationStateCreateInfo& setDepthBiasClamp( float depthBiasClamp_ )
7115 {
7116 depthBiasClamp = depthBiasClamp_;
7117 return *this;
7118 }
7119
7120 PipelineRasterizationStateCreateInfo& setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ )
7121 {
7122 depthBiasSlopeFactor = depthBiasSlopeFactor_;
7123 return *this;
7124 }
7125
7126 PipelineRasterizationStateCreateInfo& setLineWidth( float lineWidth_ )
7127 {
7128 lineWidth = lineWidth_;
7129 return *this;
7130 }
7131
7132 operator const VkPipelineRasterizationStateCreateInfo&() const
7133 {
7134 return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>(this);
7135 }
7136
7137 bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const
7138 {
7139 return ( sType == rhs.sType )
7140 && ( pNext == rhs.pNext )
7141 && ( flags == rhs.flags )
7142 && ( depthClampEnable == rhs.depthClampEnable )
7143 && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
7144 && ( polygonMode == rhs.polygonMode )
7145 && ( cullMode == rhs.cullMode )
7146 && ( frontFace == rhs.frontFace )
7147 && ( depthBiasEnable == rhs.depthBiasEnable )
7148 && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
7149 && ( depthBiasClamp == rhs.depthBiasClamp )
7150 && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
7151 && ( lineWidth == rhs.lineWidth );
7152 }
7153
7154 bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const
7155 {
7156 return !operator==( rhs );
7157 }
7158
7159 private:
7160 StructureType sType;
7161
7162 public:
7163 const void* pNext;
7164 PipelineRasterizationStateCreateFlags flags;
7165 Bool32 depthClampEnable;
7166 Bool32 rasterizerDiscardEnable;
7167 PolygonMode polygonMode;
7168 CullModeFlags cullMode;
7169 FrontFace frontFace;
7170 Bool32 depthBiasEnable;
7171 float depthBiasConstantFactor;
7172 float depthBiasClamp;
7173 float depthBiasSlopeFactor;
7174 float lineWidth;
7175 };
7176 static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
7177
7178 struct PipelineDepthStencilStateCreateInfo
7179 {
7180 PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = PipelineDepthStencilStateCreateFlags(), Bool32 depthTestEnable_ = 0, Bool32 depthWriteEnable_ = 0, CompareOp depthCompareOp_ = CompareOp::eNever, Bool32 depthBoundsTestEnable_ = 0, Bool32 stencilTestEnable_ = 0, StencilOpState front_ = StencilOpState(), StencilOpState back_ = StencilOpState(), float minDepthBounds_ = 0, float maxDepthBounds_ = 0 )
7181 : sType( StructureType::ePipelineDepthStencilStateCreateInfo )
7182 , pNext( nullptr )
7183 , flags( flags_ )
7184 , depthTestEnable( depthTestEnable_ )
7185 , depthWriteEnable( depthWriteEnable_ )
7186 , depthCompareOp( depthCompareOp_ )
7187 , depthBoundsTestEnable( depthBoundsTestEnable_ )
7188 , stencilTestEnable( stencilTestEnable_ )
7189 , front( front_ )
7190 , back( back_ )
7191 , minDepthBounds( minDepthBounds_ )
7192 , maxDepthBounds( maxDepthBounds_ )
7193 {
7194 }
7195
7196 PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs )
7197 {
7198 memcpy( this, &rhs, sizeof(PipelineDepthStencilStateCreateInfo) );
7199 }
7200
7201 PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs )
7202 {
7203 memcpy( this, &rhs, sizeof(PipelineDepthStencilStateCreateInfo) );
7204 return *this;
7205 }
7206
7207 PipelineDepthStencilStateCreateInfo& setSType( StructureType sType_ )
7208 {
7209 sType = sType_;
7210 return *this;
7211 }
7212
7213 PipelineDepthStencilStateCreateInfo& setPNext( const void* pNext_ )
7214 {
7215 pNext = pNext_;
7216 return *this;
7217 }
7218
7219 PipelineDepthStencilStateCreateInfo& setFlags( PipelineDepthStencilStateCreateFlags flags_ )
7220 {
7221 flags = flags_;
7222 return *this;
7223 }
7224
7225 PipelineDepthStencilStateCreateInfo& setDepthTestEnable( Bool32 depthTestEnable_ )
7226 {
7227 depthTestEnable = depthTestEnable_;
7228 return *this;
7229 }
7230
7231 PipelineDepthStencilStateCreateInfo& setDepthWriteEnable( Bool32 depthWriteEnable_ )
7232 {
7233 depthWriteEnable = depthWriteEnable_;
7234 return *this;
7235 }
7236
7237 PipelineDepthStencilStateCreateInfo& setDepthCompareOp( CompareOp depthCompareOp_ )
7238 {
7239 depthCompareOp = depthCompareOp_;
7240 return *this;
7241 }
7242
7243 PipelineDepthStencilStateCreateInfo& setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ )
7244 {
7245 depthBoundsTestEnable = depthBoundsTestEnable_;
7246 return *this;
7247 }
7248
7249 PipelineDepthStencilStateCreateInfo& setStencilTestEnable( Bool32 stencilTestEnable_ )
7250 {
7251 stencilTestEnable = stencilTestEnable_;
7252 return *this;
7253 }
7254
7255 PipelineDepthStencilStateCreateInfo& setFront( StencilOpState front_ )
7256 {
7257 front = front_;
7258 return *this;
7259 }
7260
7261 PipelineDepthStencilStateCreateInfo& setBack( StencilOpState back_ )
7262 {
7263 back = back_;
7264 return *this;
7265 }
7266
7267 PipelineDepthStencilStateCreateInfo& setMinDepthBounds( float minDepthBounds_ )
7268 {
7269 minDepthBounds = minDepthBounds_;
7270 return *this;
7271 }
7272
7273 PipelineDepthStencilStateCreateInfo& setMaxDepthBounds( float maxDepthBounds_ )
7274 {
7275 maxDepthBounds = maxDepthBounds_;
7276 return *this;
7277 }
7278
7279 operator const VkPipelineDepthStencilStateCreateInfo&() const
7280 {
7281 return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>(this);
7282 }
7283
7284 bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const
7285 {
7286 return ( sType == rhs.sType )
7287 && ( pNext == rhs.pNext )
7288 && ( flags == rhs.flags )
7289 && ( depthTestEnable == rhs.depthTestEnable )
7290 && ( depthWriteEnable == rhs.depthWriteEnable )
7291 && ( depthCompareOp == rhs.depthCompareOp )
7292 && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
7293 && ( stencilTestEnable == rhs.stencilTestEnable )
7294 && ( front == rhs.front )
7295 && ( back == rhs.back )
7296 && ( minDepthBounds == rhs.minDepthBounds )
7297 && ( maxDepthBounds == rhs.maxDepthBounds );
7298 }
7299
7300 bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const
7301 {
7302 return !operator==( rhs );
7303 }
7304
7305 private:
7306 StructureType sType;
7307
7308 public:
7309 const void* pNext;
7310 PipelineDepthStencilStateCreateFlags flags;
7311 Bool32 depthTestEnable;
7312 Bool32 depthWriteEnable;
7313 CompareOp depthCompareOp;
7314 Bool32 depthBoundsTestEnable;
7315 Bool32 stencilTestEnable;
7316 StencilOpState front;
7317 StencilOpState back;
7318 float minDepthBounds;
7319 float maxDepthBounds;
7320 };
7321 static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
7322
7323 struct PipelineCacheCreateInfo
7324 {
7325 PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_ = PipelineCacheCreateFlags(), size_t initialDataSize_ = 0, const void* pInitialData_ = nullptr )
7326 : sType( StructureType::ePipelineCacheCreateInfo )
7327 , pNext( nullptr )
7328 , flags( flags_ )
7329 , initialDataSize( initialDataSize_ )
7330 , pInitialData( pInitialData_ )
7331 {
7332 }
7333
7334 PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs )
7335 {
7336 memcpy( this, &rhs, sizeof(PipelineCacheCreateInfo) );
7337 }
7338
7339 PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs )
7340 {
7341 memcpy( this, &rhs, sizeof(PipelineCacheCreateInfo) );
7342 return *this;
7343 }
7344
7345 PipelineCacheCreateInfo& setSType( StructureType sType_ )
7346 {
7347 sType = sType_;
7348 return *this;
7349 }
7350
7351 PipelineCacheCreateInfo& setPNext( const void* pNext_ )
7352 {
7353 pNext = pNext_;
7354 return *this;
7355 }
7356
7357 PipelineCacheCreateInfo& setFlags( PipelineCacheCreateFlags flags_ )
7358 {
7359 flags = flags_;
7360 return *this;
7361 }
7362
7363 PipelineCacheCreateInfo& setInitialDataSize( size_t initialDataSize_ )
7364 {
7365 initialDataSize = initialDataSize_;
7366 return *this;
7367 }
7368
7369 PipelineCacheCreateInfo& setPInitialData( const void* pInitialData_ )
7370 {
7371 pInitialData = pInitialData_;
7372 return *this;
7373 }
7374
7375 operator const VkPipelineCacheCreateInfo&() const
7376 {
7377 return *reinterpret_cast<const VkPipelineCacheCreateInfo*>(this);
7378 }
7379
7380 bool operator==( PipelineCacheCreateInfo const& rhs ) const
7381 {
7382 return ( sType == rhs.sType )
7383 && ( pNext == rhs.pNext )
7384 && ( flags == rhs.flags )
7385 && ( initialDataSize == rhs.initialDataSize )
7386 && ( pInitialData == rhs.pInitialData );
7387 }
7388
7389 bool operator!=( PipelineCacheCreateInfo const& rhs ) const
7390 {
7391 return !operator==( rhs );
7392 }
7393
7394 private:
7395 StructureType sType;
7396
7397 public:
7398 const void* pNext;
7399 PipelineCacheCreateFlags flags;
7400 size_t initialDataSize;
7401 const void* pInitialData;
7402 };
7403 static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
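  // Non-normative usage sketch (assumes a previously serialized cache blob held in a
  // hypothetical std::vector<uint8_t> named cacheData):
  //
  //   vk::PipelineCacheCreateInfo cacheInfo = vk::PipelineCacheCreateInfo()
  //     .setInitialDataSize( cacheData.size() )
  //     .setPInitialData( cacheData.data() );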
7404
7405 struct SamplerCreateInfo
7406 {
7407 SamplerCreateInfo( SamplerCreateFlags flags_ = SamplerCreateFlags(), Filter magFilter_ = Filter::eNearest, Filter minFilter_ = Filter::eNearest, SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, float mipLodBias_ = 0, Bool32 anisotropyEnable_ = 0, float maxAnisotropy_ = 0, Bool32 compareEnable_ = 0, CompareOp compareOp_ = CompareOp::eNever, float minLod_ = 0, float maxLod_ = 0, BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, Bool32 unnormalizedCoordinates_ = 0 )
7408 : sType( StructureType::eSamplerCreateInfo )
7409 , pNext( nullptr )
7410 , flags( flags_ )
7411 , magFilter( magFilter_ )
7412 , minFilter( minFilter_ )
7413 , mipmapMode( mipmapMode_ )
7414 , addressModeU( addressModeU_ )
7415 , addressModeV( addressModeV_ )
7416 , addressModeW( addressModeW_ )
7417 , mipLodBias( mipLodBias_ )
7418 , anisotropyEnable( anisotropyEnable_ )
7419 , maxAnisotropy( maxAnisotropy_ )
7420 , compareEnable( compareEnable_ )
7421 , compareOp( compareOp_ )
7422 , minLod( minLod_ )
7423 , maxLod( maxLod_ )
7424 , borderColor( borderColor_ )
7425 , unnormalizedCoordinates( unnormalizedCoordinates_ )
7426 {
7427 }
7428
7429 SamplerCreateInfo( VkSamplerCreateInfo const & rhs )
7430 {
7431 memcpy( this, &rhs, sizeof(SamplerCreateInfo) );
7432 }
7433
7434 SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs )
7435 {
7436 memcpy( this, &rhs, sizeof(SamplerCreateInfo) );
7437 return *this;
7438 }
7439
7440 SamplerCreateInfo& setSType( StructureType sType_ )
7441 {
7442 sType = sType_;
7443 return *this;
7444 }
7445
7446 SamplerCreateInfo& setPNext( const void* pNext_ )
7447 {
7448 pNext = pNext_;
7449 return *this;
7450 }
7451
7452 SamplerCreateInfo& setFlags( SamplerCreateFlags flags_ )
7453 {
7454 flags = flags_;
7455 return *this;
7456 }
7457
7458 SamplerCreateInfo& setMagFilter( Filter magFilter_ )
7459 {
7460 magFilter = magFilter_;
7461 return *this;
7462 }
7463
7464 SamplerCreateInfo& setMinFilter( Filter minFilter_ )
7465 {
7466 minFilter = minFilter_;
7467 return *this;
7468 }
7469
7470 SamplerCreateInfo& setMipmapMode( SamplerMipmapMode mipmapMode_ )
7471 {
7472 mipmapMode = mipmapMode_;
7473 return *this;
7474 }
7475
7476 SamplerCreateInfo& setAddressModeU( SamplerAddressMode addressModeU_ )
7477 {
7478 addressModeU = addressModeU_;
7479 return *this;
7480 }
7481
7482 SamplerCreateInfo& setAddressModeV( SamplerAddressMode addressModeV_ )
7483 {
7484 addressModeV = addressModeV_;
7485 return *this;
7486 }
7487
7488 SamplerCreateInfo& setAddressModeW( SamplerAddressMode addressModeW_ )
7489 {
7490 addressModeW = addressModeW_;
7491 return *this;
7492 }
7493
7494 SamplerCreateInfo& setMipLodBias( float mipLodBias_ )
7495 {
7496 mipLodBias = mipLodBias_;
7497 return *this;
7498 }
7499
7500 SamplerCreateInfo& setAnisotropyEnable( Bool32 anisotropyEnable_ )
7501 {
7502 anisotropyEnable = anisotropyEnable_;
7503 return *this;
7504 }
7505
7506 SamplerCreateInfo& setMaxAnisotropy( float maxAnisotropy_ )
7507 {
7508 maxAnisotropy = maxAnisotropy_;
7509 return *this;
7510 }
7511
7512 SamplerCreateInfo& setCompareEnable( Bool32 compareEnable_ )
7513 {
7514 compareEnable = compareEnable_;
7515 return *this;
7516 }
7517
7518 SamplerCreateInfo& setCompareOp( CompareOp compareOp_ )
7519 {
7520 compareOp = compareOp_;
7521 return *this;
7522 }
7523
7524 SamplerCreateInfo& setMinLod( float minLod_ )
7525 {
7526 minLod = minLod_;
7527 return *this;
7528 }
7529
7530 SamplerCreateInfo& setMaxLod( float maxLod_ )
7531 {
7532 maxLod = maxLod_;
7533 return *this;
7534 }
7535
7536 SamplerCreateInfo& setBorderColor( BorderColor borderColor_ )
7537 {
7538 borderColor = borderColor_;
7539 return *this;
7540 }
7541
7542 SamplerCreateInfo& setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ )
7543 {
7544 unnormalizedCoordinates = unnormalizedCoordinates_;
7545 return *this;
7546 }
7547
7548 operator const VkSamplerCreateInfo&() const
7549 {
7550 return *reinterpret_cast<const VkSamplerCreateInfo*>(this);
7551 }
7552
7553 bool operator==( SamplerCreateInfo const& rhs ) const
7554 {
7555 return ( sType == rhs.sType )
7556 && ( pNext == rhs.pNext )
7557 && ( flags == rhs.flags )
7558 && ( magFilter == rhs.magFilter )
7559 && ( minFilter == rhs.minFilter )
7560 && ( mipmapMode == rhs.mipmapMode )
7561 && ( addressModeU == rhs.addressModeU )
7562 && ( addressModeV == rhs.addressModeV )
7563 && ( addressModeW == rhs.addressModeW )
7564 && ( mipLodBias == rhs.mipLodBias )
7565 && ( anisotropyEnable == rhs.anisotropyEnable )
7566 && ( maxAnisotropy == rhs.maxAnisotropy )
7567 && ( compareEnable == rhs.compareEnable )
7568 && ( compareOp == rhs.compareOp )
7569 && ( minLod == rhs.minLod )
7570 && ( maxLod == rhs.maxLod )
7571 && ( borderColor == rhs.borderColor )
7572 && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
7573 }
7574
7575 bool operator!=( SamplerCreateInfo const& rhs ) const
7576 {
7577 return !operator==( rhs );
7578 }
7579
7580 private:
7581 StructureType sType;
7582
7583 public:
7584 const void* pNext;
7585 SamplerCreateFlags flags;
7586 Filter magFilter;
7587 Filter minFilter;
7588 SamplerMipmapMode mipmapMode;
7589 SamplerAddressMode addressModeU;
7590 SamplerAddressMode addressModeV;
7591 SamplerAddressMode addressModeW;
7592 float mipLodBias;
7593 Bool32 anisotropyEnable;
7594 float maxAnisotropy;
7595 Bool32 compareEnable;
7596 CompareOp compareOp;
7597 float minLod;
7598 float maxLod;
7599 BorderColor borderColor;
7600 Bool32 unnormalizedCoordinates;
7601 };
7602 static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
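  // Non-normative usage sketch of the fluent setters; the enum values shown are ordinary members
  // of the Filter, SamplerMipmapMode and SamplerAddressMode enums defined earlier in this header:
  //
  //   vk::SamplerCreateInfo samplerInfo = vk::SamplerCreateInfo()
  //     .setMagFilter( vk::Filter::eLinear )
  //     .setMinFilter( vk::Filter::eLinear )
  //     .setMipmapMode( vk::SamplerMipmapMode::eLinear )
  //     .setAddressModeU( vk::SamplerAddressMode::eClampToEdge )
  //     .setAddressModeV( vk::SamplerAddressMode::eClampToEdge )
  //     .setMaxLod( 1.0f );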
7603
7604 struct CommandBufferAllocateInfo
7605 {
7606 CommandBufferAllocateInfo( CommandPool commandPool_ = CommandPool(), CommandBufferLevel level_ = CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = 0 )
7607 : sType( StructureType::eCommandBufferAllocateInfo )
7608 , pNext( nullptr )
7609 , commandPool( commandPool_ )
7610 , level( level_ )
7611 , commandBufferCount( commandBufferCount_ )
7612 {
7613 }
7614
7615 CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs )
7616 {
7617 memcpy( this, &rhs, sizeof(CommandBufferAllocateInfo) );
7618 }
7619
7620 CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs )
7621 {
7622 memcpy( this, &rhs, sizeof(CommandBufferAllocateInfo) );
7623 return *this;
7624 }
7625
7626 CommandBufferAllocateInfo& setSType( StructureType sType_ )
7627 {
7628 sType = sType_;
7629 return *this;
7630 }
7631
7632 CommandBufferAllocateInfo& setPNext( const void* pNext_ )
7633 {
7634 pNext = pNext_;
7635 return *this;
7636 }
7637
7638 CommandBufferAllocateInfo& setCommandPool( CommandPool commandPool_ )
7639 {
7640 commandPool = commandPool_;
7641 return *this;
7642 }
7643
7644 CommandBufferAllocateInfo& setLevel( CommandBufferLevel level_ )
7645 {
7646 level = level_;
7647 return *this;
7648 }
7649
7650 CommandBufferAllocateInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
7651 {
7652 commandBufferCount = commandBufferCount_;
7653 return *this;
7654 }
7655
7656 operator const VkCommandBufferAllocateInfo&() const
7657 {
7658 return *reinterpret_cast<const VkCommandBufferAllocateInfo*>(this);
7659 }
7660
7661 bool operator==( CommandBufferAllocateInfo const& rhs ) const
7662 {
7663 return ( sType == rhs.sType )
7664 && ( pNext == rhs.pNext )
7665 && ( commandPool == rhs.commandPool )
7666 && ( level == rhs.level )
7667 && ( commandBufferCount == rhs.commandBufferCount );
7668 }
7669
7670 bool operator!=( CommandBufferAllocateInfo const& rhs ) const
7671 {
7672 return !operator==( rhs );
7673 }
7674
7675 private:
7676 StructureType sType;
7677
7678 public:
7679 const void* pNext;
7680 CommandPool commandPool;
7681 CommandBufferLevel level;
7682 uint32_t commandBufferCount;
7683 };
7684 static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
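  // Non-normative usage sketch (commandPool is a hypothetical, previously created vk::CommandPool;
  // the constructor arguments map directly onto commandPool, level and commandBufferCount):
  //
  //   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 1 );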
7685
7686 struct RenderPassBeginInfo
7687 {
7688 RenderPassBeginInfo( RenderPass renderPass_ = RenderPass(), Framebuffer framebuffer_ = Framebuffer(), Rect2D renderArea_ = Rect2D(), uint32_t clearValueCount_ = 0, const ClearValue* pClearValues_ = nullptr )
7689 : sType( StructureType::eRenderPassBeginInfo )
7690 , pNext( nullptr )
7691 , renderPass( renderPass_ )
7692 , framebuffer( framebuffer_ )
7693 , renderArea( renderArea_ )
7694 , clearValueCount( clearValueCount_ )
7695 , pClearValues( pClearValues_ )
7696 {
7697 }
7698
7699 RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs )
7700 {
7701 memcpy( this, &rhs, sizeof(RenderPassBeginInfo) );
7702 }
7703
7704 RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs )
7705 {
7706 memcpy( this, &rhs, sizeof(RenderPassBeginInfo) );
7707 return *this;
7708 }
7709
7710 RenderPassBeginInfo& setSType( StructureType sType_ )
7711 {
7712 sType = sType_;
7713 return *this;
7714 }
7715
7716 RenderPassBeginInfo& setPNext( const void* pNext_ )
7717 {
7718 pNext = pNext_;
7719 return *this;
7720 }
7721
7722 RenderPassBeginInfo& setRenderPass( RenderPass renderPass_ )
7723 {
7724 renderPass = renderPass_;
7725 return *this;
7726 }
7727
7728 RenderPassBeginInfo& setFramebuffer( Framebuffer framebuffer_ )
7729 {
7730 framebuffer = framebuffer_;
7731 return *this;
7732 }
7733
7734 RenderPassBeginInfo& setRenderArea( Rect2D renderArea_ )
7735 {
7736 renderArea = renderArea_;
7737 return *this;
7738 }
7739
7740 RenderPassBeginInfo& setClearValueCount( uint32_t clearValueCount_ )
7741 {
7742 clearValueCount = clearValueCount_;
7743 return *this;
7744 }
7745
7746 RenderPassBeginInfo& setPClearValues( const ClearValue* pClearValues_ )
7747 {
7748 pClearValues = pClearValues_;
7749 return *this;
7750 }
7751
7752 operator const VkRenderPassBeginInfo&() const
7753 {
7754 return *reinterpret_cast<const VkRenderPassBeginInfo*>(this);
7755 }
7756
7757 bool operator==( RenderPassBeginInfo const& rhs ) const
7758 {
7759 return ( sType == rhs.sType )
7760 && ( pNext == rhs.pNext )
7761 && ( renderPass == rhs.renderPass )
7762 && ( framebuffer == rhs.framebuffer )
7763 && ( renderArea == rhs.renderArea )
7764 && ( clearValueCount == rhs.clearValueCount )
7765 && ( pClearValues == rhs.pClearValues );
7766 }
7767
7768 bool operator!=( RenderPassBeginInfo const& rhs ) const
7769 {
7770 return !operator==( rhs );
7771 }
7772
7773 private:
7774 StructureType sType;
7775
7776 public:
7777 const void* pNext;
7778 RenderPass renderPass;
7779 Framebuffer framebuffer;
7780 Rect2D renderArea;
7781 uint32_t clearValueCount;
7782 const ClearValue* pClearValues;
7783 };
7784 static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
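  // Non-normative usage sketch (renderPass, framebuffer, extent and clearValues are hypothetical
  // application-side objects; clearValues is assumed to be a contiguous container of vk::ClearValue):
  //
  //   vk::RenderPassBeginInfo beginInfo = vk::RenderPassBeginInfo()
  //     .setRenderPass( renderPass )
  //     .setFramebuffer( framebuffer )
  //     .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
  //     .setClearValueCount( static_cast<uint32_t>( clearValues.size() ) )
  //     .setPClearValues( clearValues.data() );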
7785
7786 struct EventCreateInfo
7787 {
7788 EventCreateInfo( EventCreateFlags flags_ = EventCreateFlags() )
7789 : sType( StructureType::eEventCreateInfo )
7790 , pNext( nullptr )
7791 , flags( flags_ )
7792 {
7793 }
7794
7795 EventCreateInfo( VkEventCreateInfo const & rhs )
7796 {
7797 memcpy( this, &rhs, sizeof(EventCreateInfo) );
7798 }
7799
7800 EventCreateInfo& operator=( VkEventCreateInfo const & rhs )
7801 {
7802 memcpy( this, &rhs, sizeof(EventCreateInfo) );
7803 return *this;
7804 }
7805
7806 EventCreateInfo& setSType( StructureType sType_ )
7807 {
7808 sType = sType_;
7809 return *this;
7810 }
7811
7812 EventCreateInfo& setPNext( const void* pNext_ )
7813 {
7814 pNext = pNext_;
7815 return *this;
7816 }
7817
7818 EventCreateInfo& setFlags( EventCreateFlags flags_ )
7819 {
7820 flags = flags_;
7821 return *this;
7822 }
7823
7824 operator const VkEventCreateInfo&() const
7825 {
7826 return *reinterpret_cast<const VkEventCreateInfo*>(this);
7827 }
7828
7829 bool operator==( EventCreateInfo const& rhs ) const
7830 {
7831 return ( sType == rhs.sType )
7832 && ( pNext == rhs.pNext )
7833 && ( flags == rhs.flags );
7834 }
7835
7836 bool operator!=( EventCreateInfo const& rhs ) const
7837 {
7838 return !operator==( rhs );
7839 }
7840
7841 private:
7842 StructureType sType;
7843
7844 public:
7845 const void* pNext;
7846 EventCreateFlags flags;
7847 };
7848 static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
7849
7850 struct SemaphoreCreateInfo
7851 {
7852 SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = SemaphoreCreateFlags() )
7853 : sType( StructureType::eSemaphoreCreateInfo )
7854 , pNext( nullptr )
7855 , flags( flags_ )
7856 {
7857 }
7858
7859 SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs )
7860 {
7861 memcpy( this, &rhs, sizeof(SemaphoreCreateInfo) );
7862 }
7863
7864 SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs )
7865 {
7866 memcpy( this, &rhs, sizeof(SemaphoreCreateInfo) );
7867 return *this;
7868 }
7869
7870 SemaphoreCreateInfo& setSType( StructureType sType_ )
7871 {
7872 sType = sType_;
7873 return *this;
7874 }
7875
7876 SemaphoreCreateInfo& setPNext( const void* pNext_ )
7877 {
7878 pNext = pNext_;
7879 return *this;
7880 }
7881
7882 SemaphoreCreateInfo& setFlags( SemaphoreCreateFlags flags_ )
7883 {
7884 flags = flags_;
7885 return *this;
7886 }
7887
7888 operator const VkSemaphoreCreateInfo&() const
7889 {
7890 return *reinterpret_cast<const VkSemaphoreCreateInfo*>(this);
7891 }
7892
7893 bool operator==( SemaphoreCreateInfo const& rhs ) const
7894 {
7895 return ( sType == rhs.sType )
7896 && ( pNext == rhs.pNext )
7897 && ( flags == rhs.flags );
7898 }
7899
7900 bool operator!=( SemaphoreCreateInfo const& rhs ) const
7901 {
7902 return !operator==( rhs );
7903 }
7904
7905 private:
7906 StructureType sType;
7907
7908 public:
7909 const void* pNext;
7910 SemaphoreCreateFlags flags;
7911 };
7912 static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
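  // Non-normative note: SemaphoreCreateFlags carries no flags in the core API, so a
  // default-constructed info is already complete. Sketch, assuming a hypothetical vk::Device
  // named device and the default (non-disabled) enhanced mode of this header:
  //
  //   vk::Semaphore semaphore = device.createSemaphore( vk::SemaphoreCreateInfo() );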
7913
7914 struct FramebufferCreateInfo
7915 {
7916 FramebufferCreateInfo( FramebufferCreateFlags flags_ = FramebufferCreateFlags(), RenderPass renderPass_ = RenderPass(), uint32_t attachmentCount_ = 0, const ImageView* pAttachments_ = nullptr, uint32_t width_ = 0, uint32_t height_ = 0, uint32_t layers_ = 0 )
7917 : sType( StructureType::eFramebufferCreateInfo )
7918 , pNext( nullptr )
7919 , flags( flags_ )
7920 , renderPass( renderPass_ )
7921 , attachmentCount( attachmentCount_ )
7922 , pAttachments( pAttachments_ )
7923 , width( width_ )
7924 , height( height_ )
7925 , layers( layers_ )
7926 {
7927 }
7928
7929 FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs )
7930 {
7931 memcpy( this, &rhs, sizeof(FramebufferCreateInfo) );
7932 }
7933
7934 FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs )
7935 {
7936 memcpy( this, &rhs, sizeof(FramebufferCreateInfo) );
7937 return *this;
7938 }
7939
7940 FramebufferCreateInfo& setSType( StructureType sType_ )
7941 {
7942 sType = sType_;
7943 return *this;
7944 }
7945
7946 FramebufferCreateInfo& setPNext( const void* pNext_ )
7947 {
7948 pNext = pNext_;
7949 return *this;
7950 }
7951
7952 FramebufferCreateInfo& setFlags( FramebufferCreateFlags flags_ )
7953 {
7954 flags = flags_;
7955 return *this;
7956 }
7957
7958 FramebufferCreateInfo& setRenderPass( RenderPass renderPass_ )
7959 {
7960 renderPass = renderPass_;
7961 return *this;
7962 }
7963
7964 FramebufferCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
7965 {
7966 attachmentCount = attachmentCount_;
7967 return *this;
7968 }
7969
7970 FramebufferCreateInfo& setPAttachments( const ImageView* pAttachments_ )
7971 {
7972 pAttachments = pAttachments_;
7973 return *this;
7974 }
7975
7976 FramebufferCreateInfo& setWidth( uint32_t width_ )
7977 {
7978 width = width_;
7979 return *this;
7980 }
7981
7982 FramebufferCreateInfo& setHeight( uint32_t height_ )
7983 {
7984 height = height_;
7985 return *this;
7986 }
7987
7988 FramebufferCreateInfo& setLayers( uint32_t layers_ )
7989 {
7990 layers = layers_;
7991 return *this;
7992 }
7993
7994 operator const VkFramebufferCreateInfo&() const
7995 {
7996 return *reinterpret_cast<const VkFramebufferCreateInfo*>(this);
7997 }
7998
7999 bool operator==( FramebufferCreateInfo const& rhs ) const
8000 {
8001 return ( sType == rhs.sType )
8002 && ( pNext == rhs.pNext )
8003 && ( flags == rhs.flags )
8004 && ( renderPass == rhs.renderPass )
8005 && ( attachmentCount == rhs.attachmentCount )
8006 && ( pAttachments == rhs.pAttachments )
8007 && ( width == rhs.width )
8008 && ( height == rhs.height )
8009 && ( layers == rhs.layers );
8010 }
8011
8012 bool operator!=( FramebufferCreateInfo const& rhs ) const
8013 {
8014 return !operator==( rhs );
8015 }
8016
8017 private:
8018 StructureType sType;
8019
8020 public:
8021 const void* pNext;
8022 FramebufferCreateFlags flags;
8023 RenderPass renderPass;
8024 uint32_t attachmentCount;
8025 const ImageView* pAttachments;
8026 uint32_t width;
8027 uint32_t height;
8028 uint32_t layers;
8029 };
8030 static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
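  // Non-normative usage sketch (renderPass, colorView, width and height are hypothetical
  // application-side values):
  //
  //   vk::ImageView attachments[] = { colorView };
  //   vk::FramebufferCreateInfo framebufferInfo = vk::FramebufferCreateInfo()
  //     .setRenderPass( renderPass )
  //     .setAttachmentCount( 1 )
  //     .setPAttachments( attachments )
  //     .setWidth( width )
  //     .setHeight( height )
  //     .setLayers( 1 );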
8031
8032 struct DisplayModeCreateInfoKHR
8033 {
8034 DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = DisplayModeCreateFlagsKHR(), DisplayModeParametersKHR parameters_ = DisplayModeParametersKHR() )
8035 : sType( StructureType::eDisplayModeCreateInfoKHR )
8036 , pNext( nullptr )
8037 , flags( flags_ )
8038 , parameters( parameters_ )
8039 {
8040 }
8041
8042 DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs )
8043 {
8044 memcpy( this, &rhs, sizeof(DisplayModeCreateInfoKHR) );
8045 }
8046
8047 DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs )
8048 {
8049 memcpy( this, &rhs, sizeof(DisplayModeCreateInfoKHR) );
8050 return *this;
8051 }
8052
8053 DisplayModeCreateInfoKHR& setSType( StructureType sType_ )
8054 {
8055 sType = sType_;
8056 return *this;
8057 }
8058
8059 DisplayModeCreateInfoKHR& setPNext( const void* pNext_ )
8060 {
8061 pNext = pNext_;
8062 return *this;
8063 }
8064
8065 DisplayModeCreateInfoKHR& setFlags( DisplayModeCreateFlagsKHR flags_ )
8066 {
8067 flags = flags_;
8068 return *this;
8069 }
8070
8071 DisplayModeCreateInfoKHR& setParameters( DisplayModeParametersKHR parameters_ )
8072 {
8073 parameters = parameters_;
8074 return *this;
8075 }
8076
8077 operator const VkDisplayModeCreateInfoKHR&() const
8078 {
8079 return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>(this);
8080 }
8081
8082 bool operator==( DisplayModeCreateInfoKHR const& rhs ) const
8083 {
8084 return ( sType == rhs.sType )
8085 && ( pNext == rhs.pNext )
8086 && ( flags == rhs.flags )
8087 && ( parameters == rhs.parameters );
8088 }
8089
8090 bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const
8091 {
8092 return !operator==( rhs );
8093 }
8094
8095 private:
8096 StructureType sType;
8097
8098 public:
8099 const void* pNext;
8100 DisplayModeCreateFlagsKHR flags;
8101 DisplayModeParametersKHR parameters;
8102 };
8103 static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
8104
8105 struct DisplayPresentInfoKHR
8106 {
8107 DisplayPresentInfoKHR( Rect2D srcRect_ = Rect2D(), Rect2D dstRect_ = Rect2D(), Bool32 persistent_ = 0 )
8108 : sType( StructureType::eDisplayPresentInfoKHR )
8109 , pNext( nullptr )
8110 , srcRect( srcRect_ )
8111 , dstRect( dstRect_ )
8112 , persistent( persistent_ )
8113 {
8114 }
8115
8116 DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs )
8117 {
8118 memcpy( this, &rhs, sizeof(DisplayPresentInfoKHR) );
8119 }
8120
8121 DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs )
8122 {
8123 memcpy( this, &rhs, sizeof(DisplayPresentInfoKHR) );
8124 return *this;
8125 }
8126
8127 DisplayPresentInfoKHR& setSType( StructureType sType_ )
8128 {
8129 sType = sType_;
8130 return *this;
8131 }
8132
8133 DisplayPresentInfoKHR& setPNext( const void* pNext_ )
8134 {
8135 pNext = pNext_;
8136 return *this;
8137 }
8138
8139 DisplayPresentInfoKHR& setSrcRect( Rect2D srcRect_ )
8140 {
8141 srcRect = srcRect_;
8142 return *this;
8143 }
8144
8145 DisplayPresentInfoKHR& setDstRect( Rect2D dstRect_ )
8146 {
8147 dstRect = dstRect_;
8148 return *this;
8149 }
8150
8151 DisplayPresentInfoKHR& setPersistent( Bool32 persistent_ )
8152 {
8153 persistent = persistent_;
8154 return *this;
8155 }
8156
8157 operator const VkDisplayPresentInfoKHR&() const
8158 {
8159 return *reinterpret_cast<const VkDisplayPresentInfoKHR*>(this);
8160 }
8161
8162 bool operator==( DisplayPresentInfoKHR const& rhs ) const
8163 {
8164 return ( sType == rhs.sType )
8165 && ( pNext == rhs.pNext )
8166 && ( srcRect == rhs.srcRect )
8167 && ( dstRect == rhs.dstRect )
8168 && ( persistent == rhs.persistent );
8169 }
8170
8171 bool operator!=( DisplayPresentInfoKHR const& rhs ) const
8172 {
8173 return !operator==( rhs );
8174 }
8175
8176 private:
8177 StructureType sType;
8178
8179 public:
8180 const void* pNext;
8181 Rect2D srcRect;
8182 Rect2D dstRect;
8183 Bool32 persistent;
8184 };
8185 static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
8186
8187#ifdef VK_USE_PLATFORM_ANDROID_KHR
8188 struct AndroidSurfaceCreateInfoKHR
8189 {
8190 AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = AndroidSurfaceCreateFlagsKHR(), ANativeWindow* window_ = nullptr )
8191 : sType( StructureType::eAndroidSurfaceCreateInfoKHR )
8192 , pNext( nullptr )
8193 , flags( flags_ )
8194 , window( window_ )
8195 {
8196 }
8197
8198 AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs )
8199 {
8200 memcpy( this, &rhs, sizeof(AndroidSurfaceCreateInfoKHR) );
8201 }
8202
8203 AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs )
8204 {
8205 memcpy( this, &rhs, sizeof(AndroidSurfaceCreateInfoKHR) );
8206 return *this;
8207 }
8208
8209 AndroidSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8210 {
8211 sType = sType_;
8212 return *this;
8213 }
8214
8215 AndroidSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8216 {
8217 pNext = pNext_;
8218 return *this;
8219 }
8220
8221 AndroidSurfaceCreateInfoKHR& setFlags( AndroidSurfaceCreateFlagsKHR flags_ )
8222 {
8223 flags = flags_;
8224 return *this;
8225 }
8226
8227 AndroidSurfaceCreateInfoKHR& setWindow( ANativeWindow* window_ )
8228 {
8229 window = window_;
8230 return *this;
8231 }
8232
8233 operator const VkAndroidSurfaceCreateInfoKHR&() const
8234 {
8235 return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>(this);
8236 }
8237
8238 bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const
8239 {
8240 return ( sType == rhs.sType )
8241 && ( pNext == rhs.pNext )
8242 && ( flags == rhs.flags )
8243 && ( window == rhs.window );
8244 }
8245
8246 bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const
8247 {
8248 return !operator==( rhs );
8249 }
8250
8251 private:
8252 StructureType sType;
8253
8254 public:
8255 const void* pNext;
8256 AndroidSurfaceCreateFlagsKHR flags;
8257 ANativeWindow* window;
8258 };
8259 static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8260#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
8261
8262#ifdef VK_USE_PLATFORM_MIR_KHR
8263 struct MirSurfaceCreateInfoKHR
8264 {
8265 MirSurfaceCreateInfoKHR( MirSurfaceCreateFlagsKHR flags_ = MirSurfaceCreateFlagsKHR(), MirConnection* connection_ = nullptr, MirSurface* mirSurface_ = nullptr )
8266 : sType( StructureType::eMirSurfaceCreateInfoKHR )
8267 , pNext( nullptr )
8268 , flags( flags_ )
8269 , connection( connection_ )
8270 , mirSurface( mirSurface_ )
8271 {
8272 }
8273
8274 MirSurfaceCreateInfoKHR( VkMirSurfaceCreateInfoKHR const & rhs )
8275 {
8276 memcpy( this, &rhs, sizeof(MirSurfaceCreateInfoKHR) );
8277 }
8278
8279 MirSurfaceCreateInfoKHR& operator=( VkMirSurfaceCreateInfoKHR const & rhs )
8280 {
8281 memcpy( this, &rhs, sizeof(MirSurfaceCreateInfoKHR) );
8282 return *this;
8283 }
8284
8285 MirSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8286 {
8287 sType = sType_;
8288 return *this;
8289 }
8290
8291 MirSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8292 {
8293 pNext = pNext_;
8294 return *this;
8295 }
8296
8297 MirSurfaceCreateInfoKHR& setFlags( MirSurfaceCreateFlagsKHR flags_ )
8298 {
8299 flags = flags_;
8300 return *this;
8301 }
8302
8303 MirSurfaceCreateInfoKHR& setConnection( MirConnection* connection_ )
8304 {
8305 connection = connection_;
8306 return *this;
8307 }
8308
8309 MirSurfaceCreateInfoKHR& setMirSurface( MirSurface* mirSurface_ )
8310 {
8311 mirSurface = mirSurface_;
8312 return *this;
8313 }
8314
8315 operator const VkMirSurfaceCreateInfoKHR&() const
8316 {
8317 return *reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>(this);
8318 }
8319
8320 bool operator==( MirSurfaceCreateInfoKHR const& rhs ) const
8321 {
8322 return ( sType == rhs.sType )
8323 && ( pNext == rhs.pNext )
8324 && ( flags == rhs.flags )
8325 && ( connection == rhs.connection )
8326 && ( mirSurface == rhs.mirSurface );
8327 }
8328
8329 bool operator!=( MirSurfaceCreateInfoKHR const& rhs ) const
8330 {
8331 return !operator==( rhs );
8332 }
8333
8334 private:
8335 StructureType sType;
8336
8337 public:
8338 const void* pNext;
8339 MirSurfaceCreateFlagsKHR flags;
8340 MirConnection* connection;
8341 MirSurface* mirSurface;
8342 };
8343 static_assert( sizeof( MirSurfaceCreateInfoKHR ) == sizeof( VkMirSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8344#endif /*VK_USE_PLATFORM_MIR_KHR*/
8345
8346#ifdef VK_USE_PLATFORM_VI_NN
8347 struct ViSurfaceCreateInfoNN
8348 {
8349 ViSurfaceCreateInfoNN( ViSurfaceCreateFlagsNN flags_ = ViSurfaceCreateFlagsNN(), void* window_ = nullptr )
8350 : sType( StructureType::eViSurfaceCreateInfoNN )
8351 , pNext( nullptr )
8352 , flags( flags_ )
8353 , window( window_ )
8354 {
8355 }
8356
8357 ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs )
8358 {
8359 memcpy( this, &rhs, sizeof(ViSurfaceCreateInfoNN) );
8360 }
8361
8362 ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs )
8363 {
8364 memcpy( this, &rhs, sizeof(ViSurfaceCreateInfoNN) );
8365 return *this;
8366 }
8367
8368 ViSurfaceCreateInfoNN& setSType( StructureType sType_ )
8369 {
8370 sType = sType_;
8371 return *this;
8372 }
8373
8374 ViSurfaceCreateInfoNN& setPNext( const void* pNext_ )
8375 {
8376 pNext = pNext_;
8377 return *this;
8378 }
8379
8380 ViSurfaceCreateInfoNN& setFlags( ViSurfaceCreateFlagsNN flags_ )
8381 {
8382 flags = flags_;
8383 return *this;
8384 }
8385
8386 ViSurfaceCreateInfoNN& setWindow( void* window_ )
8387 {
8388 window = window_;
8389 return *this;
8390 }
8391
8392 operator const VkViSurfaceCreateInfoNN&() const
8393 {
8394 return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>(this);
8395 }
8396
8397 bool operator==( ViSurfaceCreateInfoNN const& rhs ) const
8398 {
8399 return ( sType == rhs.sType )
8400 && ( pNext == rhs.pNext )
8401 && ( flags == rhs.flags )
8402 && ( window == rhs.window );
8403 }
8404
8405 bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const
8406 {
8407 return !operator==( rhs );
8408 }
8409
8410 private:
8411 StructureType sType;
8412
8413 public:
8414 const void* pNext;
8415 ViSurfaceCreateFlagsNN flags;
8416 void* window;
8417 };
8418 static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
8419#endif /*VK_USE_PLATFORM_VI_NN*/
8420
8421#ifdef VK_USE_PLATFORM_WAYLAND_KHR
8422 struct WaylandSurfaceCreateInfoKHR
8423 {
8424 WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_ = WaylandSurfaceCreateFlagsKHR(), struct wl_display* display_ = nullptr, struct wl_surface* surface_ = nullptr )
8425 : sType( StructureType::eWaylandSurfaceCreateInfoKHR )
8426 , pNext( nullptr )
8427 , flags( flags_ )
8428 , display( display_ )
8429 , surface( surface_ )
8430 {
8431 }
8432
8433 WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs )
8434 {
8435 memcpy( this, &rhs, sizeof(WaylandSurfaceCreateInfoKHR) );
8436 }
8437
8438 WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs )
8439 {
8440 memcpy( this, &rhs, sizeof(WaylandSurfaceCreateInfoKHR) );
8441 return *this;
8442 }
8443
8444 WaylandSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8445 {
8446 sType = sType_;
8447 return *this;
8448 }
8449
8450 WaylandSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8451 {
8452 pNext = pNext_;
8453 return *this;
8454 }
8455
8456 WaylandSurfaceCreateInfoKHR& setFlags( WaylandSurfaceCreateFlagsKHR flags_ )
8457 {
8458 flags = flags_;
8459 return *this;
8460 }
8461
8462 WaylandSurfaceCreateInfoKHR& setDisplay( struct wl_display* display_ )
8463 {
8464 display = display_;
8465 return *this;
8466 }
8467
8468 WaylandSurfaceCreateInfoKHR& setSurface( struct wl_surface* surface_ )
8469 {
8470 surface = surface_;
8471 return *this;
8472 }
8473
8474 operator const VkWaylandSurfaceCreateInfoKHR&() const
8475 {
8476 return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>(this);
8477 }
8478
8479 bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const
8480 {
8481 return ( sType == rhs.sType )
8482 && ( pNext == rhs.pNext )
8483 && ( flags == rhs.flags )
8484 && ( display == rhs.display )
8485 && ( surface == rhs.surface );
8486 }
8487
8488 bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const
8489 {
8490 return !operator==( rhs );
8491 }
8492
8493 private:
8494 StructureType sType;
8495
8496 public:
8497 const void* pNext;
8498 WaylandSurfaceCreateFlagsKHR flags;
8499 struct wl_display* display;
8500 struct wl_surface* surface;
8501 };
8502 static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8503#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
8504
8505#ifdef VK_USE_PLATFORM_WIN32_KHR
8506 struct Win32SurfaceCreateInfoKHR
8507 {
8508 Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_ = Win32SurfaceCreateFlagsKHR(), HINSTANCE hinstance_ = 0, HWND hwnd_ = 0 )
8509 : sType( StructureType::eWin32SurfaceCreateInfoKHR )
8510 , pNext( nullptr )
8511 , flags( flags_ )
8512 , hinstance( hinstance_ )
8513 , hwnd( hwnd_ )
8514 {
8515 }
8516
8517 Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs )
8518 {
8519 memcpy( this, &rhs, sizeof(Win32SurfaceCreateInfoKHR) );
8520 }
8521
8522 Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs )
8523 {
8524 memcpy( this, &rhs, sizeof(Win32SurfaceCreateInfoKHR) );
8525 return *this;
8526 }
8527
8528 Win32SurfaceCreateInfoKHR& setSType( StructureType sType_ )
8529 {
8530 sType = sType_;
8531 return *this;
8532 }
8533
8534 Win32SurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8535 {
8536 pNext = pNext_;
8537 return *this;
8538 }
8539
8540 Win32SurfaceCreateInfoKHR& setFlags( Win32SurfaceCreateFlagsKHR flags_ )
8541 {
8542 flags = flags_;
8543 return *this;
8544 }
8545
8546 Win32SurfaceCreateInfoKHR& setHinstance( HINSTANCE hinstance_ )
8547 {
8548 hinstance = hinstance_;
8549 return *this;
8550 }
8551
8552 Win32SurfaceCreateInfoKHR& setHwnd( HWND hwnd_ )
8553 {
8554 hwnd = hwnd_;
8555 return *this;
8556 }
8557
8558 operator const VkWin32SurfaceCreateInfoKHR&() const
8559 {
8560 return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>(this);
8561 }
8562
8563 bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const
8564 {
8565 return ( sType == rhs.sType )
8566 && ( pNext == rhs.pNext )
8567 && ( flags == rhs.flags )
8568 && ( hinstance == rhs.hinstance )
8569 && ( hwnd == rhs.hwnd );
8570 }
8571
8572 bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const
8573 {
8574 return !operator==( rhs );
8575 }
8576
8577 private:
8578 StructureType sType;
8579
8580 public:
8581 const void* pNext;
8582 Win32SurfaceCreateFlagsKHR flags;
8583 HINSTANCE hinstance;
8584 HWND hwnd;
8585 };
8586 static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8587#endif /*VK_USE_PLATFORM_WIN32_KHR*/
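  // Non-normative usage sketch for the Win32 path above (only meaningful when
  // VK_USE_PLATFORM_WIN32_KHR is defined; hInstance and hWnd are hypothetical values taken from
  // the application's window setup):
  //
  //   vk::Win32SurfaceCreateInfoKHR surfaceInfo = vk::Win32SurfaceCreateInfoKHR()
  //     .setHinstance( hInstance )
  //     .setHwnd( hWnd );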
8588
8589#ifdef VK_USE_PLATFORM_XLIB_KHR
8590 struct XlibSurfaceCreateInfoKHR
8591 {
8592 XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_ = XlibSurfaceCreateFlagsKHR(), Display* dpy_ = nullptr, Window window_ = 0 )
8593 : sType( StructureType::eXlibSurfaceCreateInfoKHR )
8594 , pNext( nullptr )
8595 , flags( flags_ )
8596 , dpy( dpy_ )
8597 , window( window_ )
8598 {
8599 }
8600
8601 XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs )
8602 {
8603 memcpy( this, &rhs, sizeof(XlibSurfaceCreateInfoKHR) );
8604 }
8605
8606 XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs )
8607 {
8608 memcpy( this, &rhs, sizeof(XlibSurfaceCreateInfoKHR) );
8609 return *this;
8610 }
8611
8612 XlibSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8613 {
8614 sType = sType_;
8615 return *this;
8616 }
8617
8618 XlibSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8619 {
8620 pNext = pNext_;
8621 return *this;
8622 }
8623
8624 XlibSurfaceCreateInfoKHR& setFlags( XlibSurfaceCreateFlagsKHR flags_ )
8625 {
8626 flags = flags_;
8627 return *this;
8628 }
8629
8630 XlibSurfaceCreateInfoKHR& setDpy( Display* dpy_ )
8631 {
8632 dpy = dpy_;
8633 return *this;
8634 }
8635
8636 XlibSurfaceCreateInfoKHR& setWindow( Window window_ )
8637 {
8638 window = window_;
8639 return *this;
8640 }
8641
8642 operator const VkXlibSurfaceCreateInfoKHR&() const
8643 {
8644 return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>(this);
8645 }
8646
8647 bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const
8648 {
8649 return ( sType == rhs.sType )
8650 && ( pNext == rhs.pNext )
8651 && ( flags == rhs.flags )
8652 && ( dpy == rhs.dpy )
8653 && ( window == rhs.window );
8654 }
8655
8656 bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const
8657 {
8658 return !operator==( rhs );
8659 }
8660
8661 private:
8662 StructureType sType;
8663
8664 public:
8665 const void* pNext;
8666 XlibSurfaceCreateFlagsKHR flags;
8667 Display* dpy;
8668 Window window;
8669 };
8670 static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8671#endif /*VK_USE_PLATFORM_XLIB_KHR*/
8672
8673#ifdef VK_USE_PLATFORM_XCB_KHR
8674 struct XcbSurfaceCreateInfoKHR
8675 {
8676 XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_ = XcbSurfaceCreateFlagsKHR(), xcb_connection_t* connection_ = nullptr, xcb_window_t window_ = 0 )
8677 : sType( StructureType::eXcbSurfaceCreateInfoKHR )
8678 , pNext( nullptr )
8679 , flags( flags_ )
8680 , connection( connection_ )
8681 , window( window_ )
8682 {
8683 }
8684
8685 XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs )
8686 {
8687 memcpy( this, &rhs, sizeof(XcbSurfaceCreateInfoKHR) );
8688 }
8689
8690 XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs )
8691 {
8692 memcpy( this, &rhs, sizeof(XcbSurfaceCreateInfoKHR) );
8693 return *this;
8694 }
8695
8696 XcbSurfaceCreateInfoKHR& setSType( StructureType sType_ )
8697 {
8698 sType = sType_;
8699 return *this;
8700 }
8701
8702 XcbSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8703 {
8704 pNext = pNext_;
8705 return *this;
8706 }
8707
8708 XcbSurfaceCreateInfoKHR& setFlags( XcbSurfaceCreateFlagsKHR flags_ )
8709 {
8710 flags = flags_;
8711 return *this;
8712 }
8713
8714 XcbSurfaceCreateInfoKHR& setConnection( xcb_connection_t* connection_ )
8715 {
8716 connection = connection_;
8717 return *this;
8718 }
8719
8720 XcbSurfaceCreateInfoKHR& setWindow( xcb_window_t window_ )
8721 {
8722 window = window_;
8723 return *this;
8724 }
8725
8726 operator const VkXcbSurfaceCreateInfoKHR&() const
8727 {
8728 return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>(this);
8729 }
8730
8731 bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const
8732 {
8733 return ( sType == rhs.sType )
8734 && ( pNext == rhs.pNext )
8735 && ( flags == rhs.flags )
8736 && ( connection == rhs.connection )
8737 && ( window == rhs.window );
8738 }
8739
8740 bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const
8741 {
8742 return !operator==( rhs );
8743 }
8744
8745 private:
8746 StructureType sType;
8747
8748 public:
8749 const void* pNext;
8750 XcbSurfaceCreateFlagsKHR flags;
8751 xcb_connection_t* connection;
8752 xcb_window_t window;
8753 };
8754 static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8755#endif /*VK_USE_PLATFORM_XCB_KHR*/
8756
8757 struct DebugMarkerMarkerInfoEXT
8758 {
8759 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
8760 : sType( StructureType::eDebugMarkerMarkerInfoEXT )
8761 , pNext( nullptr )
8762 , pMarkerName( pMarkerName_ )
8763 {
8764 memcpy( &color, color_.data(), 4 * sizeof( float ) );
8765 }
8766
8767 DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs )
8768 {
8769 memcpy( this, &rhs, sizeof(DebugMarkerMarkerInfoEXT) );
8770 }
8771
8772 DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs )
8773 {
8774 memcpy( this, &rhs, sizeof(DebugMarkerMarkerInfoEXT) );
8775 return *this;
8776 }
8777
8778 DebugMarkerMarkerInfoEXT& setSType( StructureType sType_ )
8779 {
8780 sType = sType_;
8781 return *this;
8782 }
8783
8784 DebugMarkerMarkerInfoEXT& setPNext( const void* pNext_ )
8785 {
8786 pNext = pNext_;
8787 return *this;
8788 }
8789
8790 DebugMarkerMarkerInfoEXT& setPMarkerName( const char* pMarkerName_ )
8791 {
8792 pMarkerName = pMarkerName_;
8793 return *this;
8794 }
8795
8796 DebugMarkerMarkerInfoEXT& setColor( std::array<float,4> color_ )
8797 {
8798 memcpy( &color, color_.data(), 4 * sizeof( float ) );
8799 return *this;
8800 }
8801
8802 operator const VkDebugMarkerMarkerInfoEXT&() const
8803 {
8804 return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>(this);
8805 }
8806
8807 bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const
8808 {
8809 return ( sType == rhs.sType )
8810 && ( pNext == rhs.pNext )
8811 && ( pMarkerName == rhs.pMarkerName )
8812 && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
8813 }
8814
8815 bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const
8816 {
8817 return !operator==( rhs );
8818 }
8819
8820 private:
8821 StructureType sType;
8822
8823 public:
8824 const void* pNext;
8825 const char* pMarkerName;
8826 float color[4];
8827 };
8828 static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
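  // Non-normative usage sketch (requires the VK_EXT_debug_marker device extension to be enabled;
  // the marker name and color shown are arbitrary example values, and the color may be left zero):
  //
  //   vk::DebugMarkerMarkerInfoEXT markerInfo( "frame begin", {{ 1.0f, 0.0f, 0.0f, 1.0f }} );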
8829
8830 struct DedicatedAllocationImageCreateInfoNV
8831 {
8832 DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
8833 : sType( StructureType::eDedicatedAllocationImageCreateInfoNV )
8834 , pNext( nullptr )
8835 , dedicatedAllocation( dedicatedAllocation_ )
8836 {
8837 }
8838
8839 DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs )
8840 {
8841 memcpy( this, &rhs, sizeof(DedicatedAllocationImageCreateInfoNV) );
8842 }
8843
8844 DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs )
8845 {
8846 memcpy( this, &rhs, sizeof(DedicatedAllocationImageCreateInfoNV) );
8847 return *this;
8848 }
8849
8850 DedicatedAllocationImageCreateInfoNV& setSType( StructureType sType_ )
8851 {
8852 sType = sType_;
8853 return *this;
8854 }
8855
8856 DedicatedAllocationImageCreateInfoNV& setPNext( const void* pNext_ )
8857 {
8858 pNext = pNext_;
8859 return *this;
8860 }
8861
8862 DedicatedAllocationImageCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
8863 {
8864 dedicatedAllocation = dedicatedAllocation_;
8865 return *this;
8866 }
8867
8868 operator const VkDedicatedAllocationImageCreateInfoNV&() const
8869 {
8870 return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>(this);
8871 }
8872
8873 bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const
8874 {
8875 return ( sType == rhs.sType )
8876 && ( pNext == rhs.pNext )
8877 && ( dedicatedAllocation == rhs.dedicatedAllocation );
8878 }
8879
8880 bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const
8881 {
8882 return !operator==( rhs );
8883 }
8884
8885 private:
8886 StructureType sType;
8887
8888 public:
8889 const void* pNext;
8890 Bool32 dedicatedAllocation;
8891 };
8892 static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
8893
8894 struct DedicatedAllocationBufferCreateInfoNV
8895 {
8896 DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
8897 : sType( StructureType::eDedicatedAllocationBufferCreateInfoNV )
8898 , pNext( nullptr )
8899 , dedicatedAllocation( dedicatedAllocation_ )
8900 {
8901 }
8902
8903 DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
8904 {
8905 memcpy( this, &rhs, sizeof(DedicatedAllocationBufferCreateInfoNV) );
8906 }
8907
8908 DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
8909 {
8910 memcpy( this, &rhs, sizeof(DedicatedAllocationBufferCreateInfoNV) );
8911 return *this;
8912 }
8913
8914 DedicatedAllocationBufferCreateInfoNV& setSType( StructureType sType_ )
8915 {
8916 sType = sType_;
8917 return *this;
8918 }
8919
8920 DedicatedAllocationBufferCreateInfoNV& setPNext( const void* pNext_ )
8921 {
8922 pNext = pNext_;
8923 return *this;
8924 }
8925
8926 DedicatedAllocationBufferCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
8927 {
8928 dedicatedAllocation = dedicatedAllocation_;
8929 return *this;
8930 }
8931
8932 operator const VkDedicatedAllocationBufferCreateInfoNV&() const
8933 {
8934 return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>(this);
8935 }
8936
8937 bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
8938 {
8939 return ( sType == rhs.sType )
8940 && ( pNext == rhs.pNext )
8941 && ( dedicatedAllocation == rhs.dedicatedAllocation );
8942 }
8943
8944 bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
8945 {
8946 return !operator==( rhs );
8947 }
8948
8949 private:
8950 StructureType sType;
8951
8952 public:
8953 const void* pNext;
8954 Bool32 dedicatedAllocation;
8955 };
8956 static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
8957
8958 struct DedicatedAllocationMemoryAllocateInfoNV
8959 {
8960 DedicatedAllocationMemoryAllocateInfoNV( Image image_ = Image(), Buffer buffer_ = Buffer() )
8961 : sType( StructureType::eDedicatedAllocationMemoryAllocateInfoNV )
8962 , pNext( nullptr )
8963 , image( image_ )
8964 , buffer( buffer_ )
8965 {
8966 }
8967
8968 DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
8969 {
8970 memcpy( this, &rhs, sizeof(DedicatedAllocationMemoryAllocateInfoNV) );
8971 }
8972
8973 DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
8974 {
8975 memcpy( this, &rhs, sizeof(DedicatedAllocationMemoryAllocateInfoNV) );
8976 return *this;
8977 }
8978
8979 DedicatedAllocationMemoryAllocateInfoNV& setSType( StructureType sType_ )
8980 {
8981 sType = sType_;
8982 return *this;
8983 }
8984
8985 DedicatedAllocationMemoryAllocateInfoNV& setPNext( const void* pNext_ )
8986 {
8987 pNext = pNext_;
8988 return *this;
8989 }
8990
8991 DedicatedAllocationMemoryAllocateInfoNV& setImage( Image image_ )
8992 {
8993 image = image_;
8994 return *this;
8995 }
8996
8997 DedicatedAllocationMemoryAllocateInfoNV& setBuffer( Buffer buffer_ )
8998 {
8999 buffer = buffer_;
9000 return *this;
9001 }
9002
9003 operator const VkDedicatedAllocationMemoryAllocateInfoNV&() const
9004 {
9005 return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>(this);
9006 }
9007
9008 bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
9009 {
9010 return ( sType == rhs.sType )
9011 && ( pNext == rhs.pNext )
9012 && ( image == rhs.image )
9013 && ( buffer == rhs.buffer );
9014 }
9015
9016 bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
9017 {
9018 return !operator==( rhs );
9019 }
9020
9021 private:
9022 StructureType sType;
9023
9024 public:
9025 const void* pNext;
9026 Image image;
9027 Buffer buffer;
9028 };
9029 static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
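  // Non-normative usage sketch for the NV_dedicated_allocation structures above: the allocate
  // info is chained into a MemoryAllocateInfo via pNext (image and memRequirements are
  // hypothetical application-side values):
  //
  //   vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedInfo = vk::DedicatedAllocationMemoryAllocateInfoNV()
  //     .setImage( image );
  //   vk::MemoryAllocateInfo allocInfo = vk::MemoryAllocateInfo()
  //     .setAllocationSize( memRequirements.size )
  //     .setPNext( &dedicatedInfo );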
9030
9031#ifdef VK_USE_PLATFORM_WIN32_KHR
9032 struct ExportMemoryWin32HandleInfoNV
9033 {
9034 ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0 )
9035 : sType( StructureType::eExportMemoryWin32HandleInfoNV )
9036 , pNext( nullptr )
9037 , pAttributes( pAttributes_ )
9038 , dwAccess( dwAccess_ )
9039 {
9040 }
9041
9042 ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs )
9043 {
9044 memcpy( this, &rhs, sizeof(ExportMemoryWin32HandleInfoNV) );
9045 }
9046
9047 ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs )
9048 {
9049 memcpy( this, &rhs, sizeof(ExportMemoryWin32HandleInfoNV) );
9050 return *this;
9051 }
9052
9053 ExportMemoryWin32HandleInfoNV& setSType( StructureType sType_ )
9054 {
9055 sType = sType_;
9056 return *this;
9057 }
9058
9059 ExportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
9060 {
9061 pNext = pNext_;
9062 return *this;
9063 }
9064
9065 ExportMemoryWin32HandleInfoNV& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
9066 {
9067 pAttributes = pAttributes_;
9068 return *this;
9069 }
9070
9071 ExportMemoryWin32HandleInfoNV& setDwAccess( DWORD dwAccess_ )
9072 {
9073 dwAccess = dwAccess_;
9074 return *this;
9075 }
9076
9077 operator const VkExportMemoryWin32HandleInfoNV&() const
9078 {
9079 return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>(this);
9080 }
9081
9082 bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const
9083 {
9084 return ( sType == rhs.sType )
9085 && ( pNext == rhs.pNext )
9086 && ( pAttributes == rhs.pAttributes )
9087 && ( dwAccess == rhs.dwAccess );
9088 }
9089
9090 bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const
9091 {
9092 return !operator==( rhs );
9093 }
9094
9095 private:
9096 StructureType sType;
9097
9098 public:
9099 const void* pNext;
9100 const SECURITY_ATTRIBUTES* pAttributes;
9101 DWORD dwAccess;
9102 };
9103 static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
9104#endif /*VK_USE_PLATFORM_WIN32_KHR*/
9105
9106#ifdef VK_USE_PLATFORM_WIN32_KHR
9107 struct Win32KeyedMutexAcquireReleaseInfoNV
9108 {
9109 Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr )
9110 : sType( StructureType::eWin32KeyedMutexAcquireReleaseInfoNV )
9111 , pNext( nullptr )
9112 , acquireCount( acquireCount_ )
9113 , pAcquireSyncs( pAcquireSyncs_ )
9114 , pAcquireKeys( pAcquireKeys_ )
9115 , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
9116 , releaseCount( releaseCount_ )
9117 , pReleaseSyncs( pReleaseSyncs_ )
9118 , pReleaseKeys( pReleaseKeys_ )
9119 {
9120 }
9121
9122 Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
9123 {
9124 memcpy( this, &rhs, sizeof(Win32KeyedMutexAcquireReleaseInfoNV) );
9125 }
9126
9127 Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
9128 {
9129 memcpy( this, &rhs, sizeof(Win32KeyedMutexAcquireReleaseInfoNV) );
9130 return *this;
9131 }
9132
9133 Win32KeyedMutexAcquireReleaseInfoNV& setSType( StructureType sType_ )
9134 {
9135 sType = sType_;
9136 return *this;
9137 }
9138
9139 Win32KeyedMutexAcquireReleaseInfoNV& setPNext( const void* pNext_ )
9140 {
9141 pNext = pNext_;
9142 return *this;
9143 }
9144
9145 Win32KeyedMutexAcquireReleaseInfoNV& setAcquireCount( uint32_t acquireCount_ )
9146 {
9147 acquireCount = acquireCount_;
9148 return *this;
9149 }
9150
9151 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
9152 {
9153 pAcquireSyncs = pAcquireSyncs_;
9154 return *this;
9155 }
9156
9157 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
9158 {
9159 pAcquireKeys = pAcquireKeys_;
9160 return *this;
9161 }
9162
9163 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ )
9164 {
9165 pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
9166 return *this;
9167 }
9168
9169 Win32KeyedMutexAcquireReleaseInfoNV& setReleaseCount( uint32_t releaseCount_ )
9170 {
9171 releaseCount = releaseCount_;
9172 return *this;
9173 }
9174
9175 Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
9176 {
9177 pReleaseSyncs = pReleaseSyncs_;
9178 return *this;
9179 }
9180
9181 Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
9182 {
9183 pReleaseKeys = pReleaseKeys_;
9184 return *this;
9185 }
9186
9187 operator const VkWin32KeyedMutexAcquireReleaseInfoNV&() const
9188 {
9189 return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>(this);
9190 }
9191
9192 bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
9193 {
9194 return ( sType == rhs.sType )
9195 && ( pNext == rhs.pNext )
9196 && ( acquireCount == rhs.acquireCount )
9197 && ( pAcquireSyncs == rhs.pAcquireSyncs )
9198 && ( pAcquireKeys == rhs.pAcquireKeys )
9199 && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
9200 && ( releaseCount == rhs.releaseCount )
9201 && ( pReleaseSyncs == rhs.pReleaseSyncs )
9202 && ( pReleaseKeys == rhs.pReleaseKeys );
9203 }
9204
9205 bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
9206 {
9207 return !operator==( rhs );
9208 }
9209
9210 private:
9211 StructureType sType;
9212
9213 public:
9214 const void* pNext;
9215 uint32_t acquireCount;
9216 const DeviceMemory* pAcquireSyncs;
9217 const uint64_t* pAcquireKeys;
9218 const uint32_t* pAcquireTimeoutMilliseconds;
9219 uint32_t releaseCount;
9220 const DeviceMemory* pReleaseSyncs;
9221 const uint64_t* pReleaseKeys;
9222 };
9223 static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
9224#endif /*VK_USE_PLATFORM_WIN32_KHR*/
9225
9226 struct DeviceGeneratedCommandsFeaturesNVX
9227 {
9228 DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 )
9229 : sType( StructureType::eDeviceGeneratedCommandsFeaturesNVX )
9230 , pNext( nullptr )
9231 , computeBindingPointSupport( computeBindingPointSupport_ )
9232 {
9233 }
9234
9235 DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
9236 {
9237 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
9238 }
9239
9240 DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
9241 {
9242 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
9243 return *this;
9244 }
9245
9246 DeviceGeneratedCommandsFeaturesNVX& setSType( StructureType sType_ )
9247 {
9248 sType = sType_;
9249 return *this;
9250 }
9251
9252 DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ )
9253 {
9254 pNext = pNext_;
9255 return *this;
9256 }
9257
9258 DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ )
9259 {
9260 computeBindingPointSupport = computeBindingPointSupport_;
9261 return *this;
9262 }
9263
9264 operator const VkDeviceGeneratedCommandsFeaturesNVX&() const
9265 {
9266 return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>(this);
9267 }
9268
9269 bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
9270 {
9271 return ( sType == rhs.sType )
9272 && ( pNext == rhs.pNext )
9273 && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
9274 }
9275
9276 bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
9277 {
9278 return !operator==( rhs );
9279 }
9280
9281 private:
9282 StructureType sType;
9283
9284 public:
9285 const void* pNext;
9286 Bool32 computeBindingPointSupport;
9287 };
9288 static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
9289
9290 struct DeviceGeneratedCommandsLimitsNVX
9291 {
9292 DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, uint32_t maxObjectEntryCounts_ = 0, uint32_t minSequenceCountBufferOffsetAlignment_ = 0, uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 )
9293 : sType( StructureType::eDeviceGeneratedCommandsLimitsNVX )
9294 , pNext( nullptr )
9295 , maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
9296 , maxObjectEntryCounts( maxObjectEntryCounts_ )
9297 , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
9298 , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
9299 , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
9300 {
9301 }
9302
9303 DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
9304 {
9305 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
9306 }
9307
9308 DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
9309 {
9310 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
9311 return *this;
9312 }
9313
9314 DeviceGeneratedCommandsLimitsNVX& setSType( StructureType sType_ )
9315 {
9316 sType = sType_;
9317 return *this;
9318 }
9319
9320 DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ )
9321 {
9322 pNext = pNext_;
9323 return *this;
9324 }
9325
9326 DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ )
9327 {
9328 maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
9329 return *this;
9330 }
9331
9332 DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ )
9333 {
9334 maxObjectEntryCounts = maxObjectEntryCounts_;
9335 return *this;
9336 }
9337
9338 DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ )
9339 {
9340 minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
9341 return *this;
9342 }
9343
9344 DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ )
9345 {
9346 minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
9347 return *this;
9348 }
9349
9350 DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ )
9351 {
9352 minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
9353 return *this;
9354 }
9355
9356 operator const VkDeviceGeneratedCommandsLimitsNVX&() const
9357 {
9358 return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>(this);
9359 }
9360
9361 bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
9362 {
9363 return ( sType == rhs.sType )
9364 && ( pNext == rhs.pNext )
9365 && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
9366 && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
9367 && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
9368 && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
9369 && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
9370 }
9371
9372 bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
9373 {
9374 return !operator==( rhs );
9375 }
9376
9377 private:
9378 StructureType sType;
9379
9380 public:
9381 const void* pNext;
9382 uint32_t maxIndirectCommandsLayoutTokenCount;
9383 uint32_t maxObjectEntryCounts;
9384 uint32_t minSequenceCountBufferOffsetAlignment;
9385 uint32_t minSequenceIndexBufferOffsetAlignment;
9386 uint32_t minCommandsTokenBufferOffsetAlignment;
9387 };
9388 static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
9389
9390 struct CmdReserveSpaceForCommandsInfoNVX
9391 {
9392 CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t maxSequencesCount_ = 0 )
9393 : sType( StructureType::eCmdReserveSpaceForCommandsInfoNVX )
9394 , pNext( nullptr )
9395 , objectTable( objectTable_ )
9396 , indirectCommandsLayout( indirectCommandsLayout_ )
9397 , maxSequencesCount( maxSequencesCount_ )
9398 {
9399 }
9400
9401 CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
9402 {
9403 memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
9404 }
9405
9406 CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
9407 {
9408 memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
9409 return *this;
9410 }
9411
9412 CmdReserveSpaceForCommandsInfoNVX& setSType( StructureType sType_ )
9413 {
9414 sType = sType_;
9415 return *this;
9416 }
9417
9418 CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ )
9419 {
9420 pNext = pNext_;
9421 return *this;
9422 }
9423
9424 CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
9425 {
9426 objectTable = objectTable_;
9427 return *this;
9428 }
9429
9430 CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
9431 {
9432 indirectCommandsLayout = indirectCommandsLayout_;
9433 return *this;
9434 }
9435
9436 CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
9437 {
9438 maxSequencesCount = maxSequencesCount_;
9439 return *this;
9440 }
9441
9442 operator const VkCmdReserveSpaceForCommandsInfoNVX&() const
9443 {
9444 return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>(this);
9445 }
9446
9447 bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
9448 {
9449 return ( sType == rhs.sType )
9450 && ( pNext == rhs.pNext )
9451 && ( objectTable == rhs.objectTable )
9452 && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
9453 && ( maxSequencesCount == rhs.maxSequencesCount );
9454 }
9455
9456 bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
9457 {
9458 return !operator==( rhs );
9459 }
9460
9461 private:
9462 StructureType sType;
9463
9464 public:
9465 const void* pNext;
9466 ObjectTableNVX objectTable;
9467 IndirectCommandsLayoutNVX indirectCommandsLayout;
9468 uint32_t maxSequencesCount;
9469 };
9470 static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
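  // Usage sketch (not generated code): CmdReserveSpaceForCommandsInfoNVX is normally filled in
  // with the fluent setters above and handed to the CommandBuffer::reserveSpaceForCommandsNVX
  // wrapper declared elsewhere in this header. The objectTable, indirectCommandsLayout and
  // commandBuffer values below are placeholders supplied by the application.
  //
  //   vk::CmdReserveSpaceForCommandsInfoNVX reserveInfo = vk::CmdReserveSpaceForCommandsInfoNVX()
  //     .setObjectTable( objectTable )
  //     .setIndirectCommandsLayout( indirectCommandsLayout )
  //     .setMaxSequencesCount( 128 );
  //   commandBuffer.reserveSpaceForCommandsNVX( reserveInfo );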
9471
9472 struct PhysicalDeviceFeatures2KHR
9473 {
9474 PhysicalDeviceFeatures2KHR( PhysicalDeviceFeatures features_ = PhysicalDeviceFeatures() )
9475 : sType( StructureType::ePhysicalDeviceFeatures2KHR )
9476 , pNext( nullptr )
9477 , features( features_ )
9478 {
9479 }
9480
9481 PhysicalDeviceFeatures2KHR( VkPhysicalDeviceFeatures2KHR const & rhs )
9482 {
9483 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures2KHR) );
9484 }
9485
9486 PhysicalDeviceFeatures2KHR& operator=( VkPhysicalDeviceFeatures2KHR const & rhs )
9487 {
9488 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures2KHR) );
9489 return *this;
9490 }
9491
9492 PhysicalDeviceFeatures2KHR& setSType( StructureType sType_ )
9493 {
9494 sType = sType_;
9495 return *this;
9496 }
9497
9498 PhysicalDeviceFeatures2KHR& setPNext( void* pNext_ )
9499 {
9500 pNext = pNext_;
9501 return *this;
9502 }
9503
9504 PhysicalDeviceFeatures2KHR& setFeatures( PhysicalDeviceFeatures features_ )
9505 {
9506 features = features_;
9507 return *this;
9508 }
9509
9510 operator const VkPhysicalDeviceFeatures2KHR&() const
9511 {
9512 return *reinterpret_cast<const VkPhysicalDeviceFeatures2KHR*>(this);
9513 }
9514
9515 bool operator==( PhysicalDeviceFeatures2KHR const& rhs ) const
9516 {
9517 return ( sType == rhs.sType )
9518 && ( pNext == rhs.pNext )
9519 && ( features == rhs.features );
9520 }
9521
9522 bool operator!=( PhysicalDeviceFeatures2KHR const& rhs ) const
9523 {
9524 return !operator==( rhs );
9525 }
9526
9527 private:
9528 StructureType sType;
9529
9530 public:
9531 void* pNext;
9532 PhysicalDeviceFeatures features;
9533 };
9534 static_assert( sizeof( PhysicalDeviceFeatures2KHR ) == sizeof( VkPhysicalDeviceFeatures2KHR ), "struct and wrapper have different size!" );
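  // Usage sketch (not generated code): PhysicalDeviceFeatures2KHR wraps the core feature struct so
  // that extension feature structs can be chained through pNext. The physicalDevice handle and the
  // getFeatures2KHR wrapper (from VK_KHR_get_physical_device_properties2, declared elsewhere in
  // this header) are assumptions of this example.
  //
  //   vk::PhysicalDeviceFeatures2KHR features2 = physicalDevice.getFeatures2KHR();
  //   if ( features2.features.samplerAnisotropy )
  //   {
  //     // the device supports anisotropic filtering
  //   }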
9535
9536 enum class SubpassContents
9537 {
9538 eInline = VK_SUBPASS_CONTENTS_INLINE,
9539 eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
9540 };
9541
9542 struct PresentInfoKHR
9543 {
9544 PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
9545 : sType( StructureType::ePresentInfoKHR )
9546 , pNext( nullptr )
9547 , waitSemaphoreCount( waitSemaphoreCount_ )
9548 , pWaitSemaphores( pWaitSemaphores_ )
9549 , swapchainCount( swapchainCount_ )
9550 , pSwapchains( pSwapchains_ )
9551 , pImageIndices( pImageIndices_ )
9552 , pResults( pResults_ )
9553 {
9554 }
9555
9556 PresentInfoKHR( VkPresentInfoKHR const & rhs )
9557 {
9558 memcpy( this, &rhs, sizeof(PresentInfoKHR) );
9559 }
9560
9561 PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
9562 {
9563 memcpy( this, &rhs, sizeof(PresentInfoKHR) );
9564 return *this;
9565 }
9566
9567 PresentInfoKHR& setSType( StructureType sType_ )
9568 {
9569 sType = sType_;
9570 return *this;
9571 }
9572
9573 PresentInfoKHR& setPNext( const void* pNext_ )
9574 {
9575 pNext = pNext_;
9576 return *this;
9577 }
9578
9579 PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
9580 {
9581 waitSemaphoreCount = waitSemaphoreCount_;
9582 return *this;
9583 }
9584
9585 PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
9586 {
9587 pWaitSemaphores = pWaitSemaphores_;
9588 return *this;
9589 }
9590
9591 PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
9592 {
9593 swapchainCount = swapchainCount_;
9594 return *this;
9595 }
9596
9597 PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
9598 {
9599 pSwapchains = pSwapchains_;
9600 return *this;
9601 }
9602
9603 PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
9604 {
9605 pImageIndices = pImageIndices_;
9606 return *this;
9607 }
9608
9609 PresentInfoKHR& setPResults( Result* pResults_ )
9610 {
9611 pResults = pResults_;
9612 return *this;
9613 }
9614
9615 operator const VkPresentInfoKHR&() const
9616 {
9617 return *reinterpret_cast<const VkPresentInfoKHR*>(this);
9618 }
9619
9620 bool operator==( PresentInfoKHR const& rhs ) const
9621 {
9622 return ( sType == rhs.sType )
9623 && ( pNext == rhs.pNext )
9624 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
9625 && ( pWaitSemaphores == rhs.pWaitSemaphores )
9626 && ( swapchainCount == rhs.swapchainCount )
9627 && ( pSwapchains == rhs.pSwapchains )
9628 && ( pImageIndices == rhs.pImageIndices )
9629 && ( pResults == rhs.pResults );
9630 }
9631
9632 bool operator!=( PresentInfoKHR const& rhs ) const
9633 {
9634 return !operator==( rhs );
9635 }
9636
9637 private:
9638 StructureType sType;
9639
9640 public:
9641 const void* pNext;
9642 uint32_t waitSemaphoreCount;
9643 const Semaphore* pWaitSemaphores;
9644 uint32_t swapchainCount;
9645 const SwapchainKHR* pSwapchains;
9646 const uint32_t* pImageIndices;
9647 Result* pResults;
9648 };
9649 static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
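  // Usage sketch (not generated code): a single-swapchain present. The swapchain, imageIndex,
  // renderFinishedSemaphore and presentQueue values are placeholders; Queue::presentKHR is the
  // wrapper for vkQueuePresentKHR declared elsewhere in this header.
  //
  //   vk::PresentInfoKHR presentInfo = vk::PresentInfoKHR()
  //     .setWaitSemaphoreCount( 1 )
  //     .setPWaitSemaphores( &renderFinishedSemaphore )
  //     .setSwapchainCount( 1 )
  //     .setPSwapchains( &swapchain )
  //     .setPImageIndices( &imageIndex );
  //   presentQueue.presentKHR( presentInfo );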
9650
9651 enum class DynamicState
9652 {
9653 eViewport = VK_DYNAMIC_STATE_VIEWPORT,
9654 eScissor = VK_DYNAMIC_STATE_SCISSOR,
9655 eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
9656 eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
9657 eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
9658 eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
9659 eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
9660 eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
9661 eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE
9662 };
9663
9664 struct PipelineDynamicStateCreateInfo
9665 {
9666 PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
9667 : sType( StructureType::ePipelineDynamicStateCreateInfo )
9668 , pNext( nullptr )
9669 , flags( flags_ )
9670 , dynamicStateCount( dynamicStateCount_ )
9671 , pDynamicStates( pDynamicStates_ )
9672 {
9673 }
9674
9675 PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
9676 {
9677 memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
9678 }
9679
9680 PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
9681 {
9682 memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
9683 return *this;
9684 }
9685
9686 PipelineDynamicStateCreateInfo& setSType( StructureType sType_ )
9687 {
9688 sType = sType_;
9689 return *this;
9690 }
9691
9692 PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
9693 {
9694 pNext = pNext_;
9695 return *this;
9696 }
9697
9698 PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
9699 {
9700 flags = flags_;
9701 return *this;
9702 }
9703
9704 PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
9705 {
9706 dynamicStateCount = dynamicStateCount_;
9707 return *this;
9708 }
9709
9710 PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
9711 {
9712 pDynamicStates = pDynamicStates_;
9713 return *this;
9714 }
9715
9716 operator const VkPipelineDynamicStateCreateInfo&() const
9717 {
9718 return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
9719 }
9720
9721 bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
9722 {
9723 return ( sType == rhs.sType )
9724 && ( pNext == rhs.pNext )
9725 && ( flags == rhs.flags )
9726 && ( dynamicStateCount == rhs.dynamicStateCount )
9727 && ( pDynamicStates == rhs.pDynamicStates );
9728 }
9729
9730 bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
9731 {
9732 return !operator==( rhs );
9733 }
9734
9735 private:
9736 StructureType sType;
9737
9738 public:
9739 const void* pNext;
9740 PipelineDynamicStateCreateFlags flags;
9741 uint32_t dynamicStateCount;
9742 const DynamicState* pDynamicStates;
9743 };
9744 static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
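  // Usage sketch (not generated code): declaring viewport and scissor as dynamic state so they can
  // be set per command buffer instead of being baked into the pipeline.
  //
  //   std::array<vk::DynamicState, 2> dynamicStates = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
  //   vk::PipelineDynamicStateCreateInfo dynamicState = vk::PipelineDynamicStateCreateInfo()
  //     .setDynamicStateCount( static_cast<uint32_t>( dynamicStates.size() ) )
  //     .setPDynamicStates( dynamicStates.data() );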
9745
9746 enum class QueueFlagBits
9747 {
9748 eGraphics = VK_QUEUE_GRAPHICS_BIT,
9749 eCompute = VK_QUEUE_COMPUTE_BIT,
9750 eTransfer = VK_QUEUE_TRANSFER_BIT,
9751 eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT
9752 };
9753
9754 using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
9755
9756 VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
9757 {
9758 return QueueFlags( bit0 ) | bit1;
9759 }
9760
9761 VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits )
9762 {
9763 return ~( QueueFlags( bits ) );
9764 }
9765
9766 template <> struct FlagTraits<QueueFlagBits>
9767 {
9768 enum
9769 {
9770 allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding)
9771 };
9772 };
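  // Usage sketch (not generated code): QueueFlags is a type-safe bitmask, so individual
  // QueueFlagBits values are combined with operator| and tested via operator& and the explicit
  // bool conversion provided by the Flags template defined earlier in this header.
  //
  //   vk::QueueFlags wanted = vk::QueueFlagBits::eGraphics | vk::QueueFlagBits::eCompute;
  //   bool hasGraphics = static_cast<bool>( wanted & vk::QueueFlagBits::eGraphics );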
9773
9774 struct QueueFamilyProperties
9775 {
9776 operator const VkQueueFamilyProperties&() const
9777 {
9778 return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
9779 }
9780
9781 bool operator==( QueueFamilyProperties const& rhs ) const
9782 {
9783 return ( queueFlags == rhs.queueFlags )
9784 && ( queueCount == rhs.queueCount )
9785 && ( timestampValidBits == rhs.timestampValidBits )
9786 && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
9787 }
9788
9789 bool operator!=( QueueFamilyProperties const& rhs ) const
9790 {
9791 return !operator==( rhs );
9792 }
9793
9794 QueueFlags queueFlags;
9795 uint32_t queueCount;
9796 uint32_t timestampValidBits;
9797 Extent3D minImageTransferGranularity;
9798 };
9799 static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
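  // Usage sketch (not generated code): finding a graphics-capable queue family. The physicalDevice
  // handle is a placeholder; getQueueFamilyProperties is the enhanced-mode wrapper for
  // vkGetPhysicalDeviceQueueFamilyProperties declared elsewhere in this header.
  //
  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  //   for ( uint32_t i = 0; i < families.size(); i++ )
  //   {
  //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       // queue family i supports graphics work
  //     }
  //   }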
9800
9801 struct QueueFamilyProperties2KHR
9802 {
9803 operator const VkQueueFamilyProperties2KHR&() const
9804 {
9805 return *reinterpret_cast<const VkQueueFamilyProperties2KHR*>(this);
9806 }
9807
9808 bool operator==( QueueFamilyProperties2KHR const& rhs ) const
9809 {
9810 return ( sType == rhs.sType )
9811 && ( pNext == rhs.pNext )
9812 && ( queueFamilyProperties == rhs.queueFamilyProperties );
9813 }
9814
9815 bool operator!=( QueueFamilyProperties2KHR const& rhs ) const
9816 {
9817 return !operator==( rhs );
9818 }
9819
9820 private:
9821 StructureType sType;
9822
9823 public:
9824 void* pNext;
9825 QueueFamilyProperties queueFamilyProperties;
9826 };
9827 static_assert( sizeof( QueueFamilyProperties2KHR ) == sizeof( VkQueueFamilyProperties2KHR ), "struct and wrapper have different size!" );
9828
9829 enum class MemoryPropertyFlagBits
9830 {
9831 eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
9832 eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
9833 eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
9834 eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
9835 eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT
9836 };
9837
9838 using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
9839
9840 VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
9841 {
9842 return MemoryPropertyFlags( bit0 ) | bit1;
9843 }
9844
9845 VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
9846 {
9847 return ~( MemoryPropertyFlags( bits ) );
9848 }
9849
9850 template <> struct FlagTraits<MemoryPropertyFlagBits>
9851 {
9852 enum
9853 {
9854 allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated)
9855 };
9856 };
9857
9858 struct MemoryType
9859 {
9860 operator const VkMemoryType&() const
9861 {
9862 return *reinterpret_cast<const VkMemoryType*>(this);
9863 }
9864
9865 bool operator==( MemoryType const& rhs ) const
9866 {
9867 return ( propertyFlags == rhs.propertyFlags )
9868 && ( heapIndex == rhs.heapIndex );
9869 }
9870
9871 bool operator!=( MemoryType const& rhs ) const
9872 {
9873 return !operator==( rhs );
9874 }
9875
9876 MemoryPropertyFlags propertyFlags;
9877 uint32_t heapIndex;
9878 };
9879 static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
9880
9881 enum class MemoryHeapFlagBits
9882 {
9883 eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT
9884 };
9885
9886 using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
9887
9888 VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
9889 {
9890 return MemoryHeapFlags( bit0 ) | bit1;
9891 }
9892
9893 VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits )
9894 {
9895 return ~( MemoryHeapFlags( bits ) );
9896 }
9897
9898 template <> struct FlagTraits<MemoryHeapFlagBits>
9899 {
9900 enum
9901 {
9902 allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal)
9903 };
9904 };
9905
9906 struct MemoryHeap
9907 {
9908 operator const VkMemoryHeap&() const
9909 {
9910 return *reinterpret_cast<const VkMemoryHeap*>(this);
9911 }
9912
9913 bool operator==( MemoryHeap const& rhs ) const
9914 {
9915 return ( size == rhs.size )
9916 && ( flags == rhs.flags );
9917 }
9918
9919 bool operator!=( MemoryHeap const& rhs ) const
9920 {
9921 return !operator==( rhs );
9922 }
9923
9924 DeviceSize size;
9925 MemoryHeapFlags flags;
9926 };
9927 static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
9928
9929 struct PhysicalDeviceMemoryProperties
9930 {
9931 operator const VkPhysicalDeviceMemoryProperties&() const
9932 {
9933 return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>(this);
9934 }
9935
9936 bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const
9937 {
9938 return ( memoryTypeCount == rhs.memoryTypeCount )
9939 && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( MemoryType ) ) == 0 )
9940 && ( memoryHeapCount == rhs.memoryHeapCount )
9941 && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( MemoryHeap ) ) == 0 );
9942 }
9943
9944 bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const
9945 {
9946 return !operator==( rhs );
9947 }
9948
9949 uint32_t memoryTypeCount;
9950 MemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
9951 uint32_t memoryHeapCount;
9952 MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
9953 };
9954 static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
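  // Usage sketch (not generated code): the classic memory-type search used when allocating device
  // memory. typeBits would come from a MemoryRequirements query; physicalDevice is a placeholder.
  //
  //   vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
  //   uint32_t memoryTypeIndex = VK_MAX_MEMORY_TYPES;
  //   for ( uint32_t i = 0; i < memProps.memoryTypeCount; i++ )
  //   {
  //     if ( ( typeBits & ( 1u << i ) ) &&
  //          ( memProps.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ) )
  //     {
  //       memoryTypeIndex = i;
  //       break;
  //     }
  //   }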
9955
9956 struct PhysicalDeviceMemoryProperties2KHR
9957 {
9958 operator const VkPhysicalDeviceMemoryProperties2KHR&() const
9959 {
9960 return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2KHR*>(this);
9961 }
9962
9963 bool operator==( PhysicalDeviceMemoryProperties2KHR const& rhs ) const
9964 {
9965 return ( sType == rhs.sType )
9966 && ( pNext == rhs.pNext )
9967 && ( memoryProperties == rhs.memoryProperties );
9968 }
9969
9970 bool operator!=( PhysicalDeviceMemoryProperties2KHR const& rhs ) const
9971 {
9972 return !operator==( rhs );
9973 }
9974
9975 private:
9976 StructureType sType;
9977
9978 public:
9979 void* pNext;
9980 PhysicalDeviceMemoryProperties memoryProperties;
9981 };
9982 static_assert( sizeof( PhysicalDeviceMemoryProperties2KHR ) == sizeof( VkPhysicalDeviceMemoryProperties2KHR ), "struct and wrapper have different size!" );
9983
9984 enum class AccessFlagBits
9985 {
9986 eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
9987 eIndexRead = VK_ACCESS_INDEX_READ_BIT,
9988 eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
9989 eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
9990 eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
9991 eShaderRead = VK_ACCESS_SHADER_READ_BIT,
9992 eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
9993 eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
9994 eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
9995 eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
9996 eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
9997 eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
9998 eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
9999 eHostRead = VK_ACCESS_HOST_READ_BIT,
10000 eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
10001 eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
10002 eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
10003 eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
10004 eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX
10005 };
10006
10007 using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
10008
10009 VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
10010 {
10011 return AccessFlags( bit0 ) | bit1;
10012 }
10013
10014 VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits )
10015 {
10016 return ~( AccessFlags( bits ) );
10017 }
10018
10019 template <> struct FlagTraits<AccessFlagBits>
10020 {
10021 enum
10022 {
10023 allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX)
10024 };
10025 };
10026
10027 struct MemoryBarrier
10028 {
10029 MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags() )
10030 : sType( StructureType::eMemoryBarrier )
10031 , pNext( nullptr )
10032 , srcAccessMask( srcAccessMask_ )
10033 , dstAccessMask( dstAccessMask_ )
10034 {
10035 }
10036
10037 MemoryBarrier( VkMemoryBarrier const & rhs )
10038 {
10039 memcpy( this, &rhs, sizeof(MemoryBarrier) );
10040 }
10041
10042 MemoryBarrier& operator=( VkMemoryBarrier const & rhs )
10043 {
10044 memcpy( this, &rhs, sizeof(MemoryBarrier) );
10045 return *this;
10046 }
10047
10048 MemoryBarrier& setSType( StructureType sType_ )
10049 {
10050 sType = sType_;
10051 return *this;
10052 }
10053
10054 MemoryBarrier& setPNext( const void* pNext_ )
10055 {
10056 pNext = pNext_;
10057 return *this;
10058 }
10059
10060 MemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
10061 {
10062 srcAccessMask = srcAccessMask_;
10063 return *this;
10064 }
10065
10066 MemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
10067 {
10068 dstAccessMask = dstAccessMask_;
10069 return *this;
10070 }
10071
10072 operator const VkMemoryBarrier&() const
10073 {
10074 return *reinterpret_cast<const VkMemoryBarrier*>(this);
10075 }
10076
10077 bool operator==( MemoryBarrier const& rhs ) const
10078 {
10079 return ( sType == rhs.sType )
10080 && ( pNext == rhs.pNext )
10081 && ( srcAccessMask == rhs.srcAccessMask )
10082 && ( dstAccessMask == rhs.dstAccessMask );
10083 }
10084
10085 bool operator!=( MemoryBarrier const& rhs ) const
10086 {
10087 return !operator==( rhs );
10088 }
10089
10090 private:
10091 StructureType sType;
10092
10093 public:
10094 const void* pNext;
10095 AccessFlags srcAccessMask;
10096 AccessFlags dstAccessMask;
10097 };
10098 static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
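  // Usage sketch (not generated code): a global memory barrier making transfer writes visible to
  // shader reads. The commandBuffer handle is a placeholder, and the example assumes the
  // enhanced-mode CommandBuffer::pipelineBarrier overload declared elsewhere in this header.
  //
  //   vk::MemoryBarrier barrier = vk::MemoryBarrier()
  //     .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
  //     .setDstAccessMask( vk::AccessFlagBits::eShaderRead );
  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
  //                                  vk::PipelineStageFlagBits::eFragmentShader,
  //                                  vk::DependencyFlags(), barrier, nullptr, nullptr );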
10099
10100 struct BufferMemoryBarrier
10101 {
10102 BufferMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
10103 : sType( StructureType::eBufferMemoryBarrier )
10104 , pNext( nullptr )
10105 , srcAccessMask( srcAccessMask_ )
10106 , dstAccessMask( dstAccessMask_ )
10107 , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
10108 , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
10109 , buffer( buffer_ )
10110 , offset( offset_ )
10111 , size( size_ )
10112 {
10113 }
10114
10115 BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs )
10116 {
10117 memcpy( this, &rhs, sizeof(BufferMemoryBarrier) );
10118 }
10119
10120 BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs )
10121 {
10122 memcpy( this, &rhs, sizeof(BufferMemoryBarrier) );
10123 return *this;
10124 }
10125
10126 BufferMemoryBarrier& setSType( StructureType sType_ )
10127 {
10128 sType = sType_;
10129 return *this;
10130 }
10131
10132 BufferMemoryBarrier& setPNext( const void* pNext_ )
10133 {
10134 pNext = pNext_;
10135 return *this;
10136 }
10137
10138 BufferMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
10139 {
10140 srcAccessMask = srcAccessMask_;
10141 return *this;
10142 }
10143
10144 BufferMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
10145 {
10146 dstAccessMask = dstAccessMask_;
10147 return *this;
10148 }
10149
10150 BufferMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
10151 {
10152 srcQueueFamilyIndex = srcQueueFamilyIndex_;
10153 return *this;
10154 }
10155
10156 BufferMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
10157 {
10158 dstQueueFamilyIndex = dstQueueFamilyIndex_;
10159 return *this;
10160 }
10161
10162 BufferMemoryBarrier& setBuffer( Buffer buffer_ )
10163 {
10164 buffer = buffer_;
10165 return *this;
10166 }
10167
10168 BufferMemoryBarrier& setOffset( DeviceSize offset_ )
10169 {
10170 offset = offset_;
10171 return *this;
10172 }
10173
10174 BufferMemoryBarrier& setSize( DeviceSize size_ )
10175 {
10176 size = size_;
10177 return *this;
10178 }
10179
10180 operator const VkBufferMemoryBarrier&() const
10181 {
10182 return *reinterpret_cast<const VkBufferMemoryBarrier*>(this);
10183 }
10184
10185 bool operator==( BufferMemoryBarrier const& rhs ) const
10186 {
10187 return ( sType == rhs.sType )
10188 && ( pNext == rhs.pNext )
10189 && ( srcAccessMask == rhs.srcAccessMask )
10190 && ( dstAccessMask == rhs.dstAccessMask )
10191 && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
10192 && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
10193 && ( buffer == rhs.buffer )
10194 && ( offset == rhs.offset )
10195 && ( size == rhs.size );
10196 }
10197
10198 bool operator!=( BufferMemoryBarrier const& rhs ) const
10199 {
10200 return !operator==( rhs );
10201 }
10202
10203 private:
10204 StructureType sType;
10205
10206 public:
10207 const void* pNext;
10208 AccessFlags srcAccessMask;
10209 AccessFlags dstAccessMask;
10210 uint32_t srcQueueFamilyIndex;
10211 uint32_t dstQueueFamilyIndex;
10212 Buffer buffer;
10213 DeviceSize offset;
10214 DeviceSize size;
10215 };
10216 static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
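  // Usage sketch (not generated code): releasing a buffer range from a transfer queue family to a
  // graphics queue family. All handles and queue family indices below are placeholders.
  //
  //   vk::BufferMemoryBarrier bufferBarrier = vk::BufferMemoryBarrier()
  //     .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
  //     .setDstAccessMask( vk::AccessFlagBits::eVertexAttributeRead )
  //     .setSrcQueueFamilyIndex( transferFamilyIndex )
  //     .setDstQueueFamilyIndex( graphicsFamilyIndex )
  //     .setBuffer( vertexBuffer )
  //     .setOffset( 0 )
  //     .setSize( VK_WHOLE_SIZE );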
10217
10218 enum class BufferUsageFlagBits
10219 {
10220 eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
10221 eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
10222 eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
10223 eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
10224 eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
10225 eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
10226 eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
10227 eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
10228 eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT
10229 };
10230
10231 using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
10232
10233 VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
10234 {
10235 return BufferUsageFlags( bit0 ) | bit1;
10236 }
10237
10238 VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits )
10239 {
10240 return ~( BufferUsageFlags( bits ) );
10241 }
10242
10243 template <> struct FlagTraits<BufferUsageFlagBits>
10244 {
10245 enum
10246 {
10247 allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer)
10248 };
10249 };
10250
10251 enum class BufferCreateFlagBits
10252 {
10253 eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
10254 eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
10255 eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT
10256 };
10257
10258 using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
10259
10260 VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
10261 {
10262 return BufferCreateFlags( bit0 ) | bit1;
10263 }
10264
10265 VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits )
10266 {
10267 return ~( BufferCreateFlags( bits ) );
10268 }
10269
10270 template <> struct FlagTraits<BufferCreateFlagBits>
10271 {
10272 enum
10273 {
10274 allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased)
10275 };
10276 };
10277
10278 struct BufferCreateInfo
10279 {
10280 BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), DeviceSize size_ = 0, BufferUsageFlags usage_ = BufferUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr )
10281 : sType( StructureType::eBufferCreateInfo )
10282 , pNext( nullptr )
10283 , flags( flags_ )
10284 , size( size_ )
10285 , usage( usage_ )
10286 , sharingMode( sharingMode_ )
10287 , queueFamilyIndexCount( queueFamilyIndexCount_ )
10288 , pQueueFamilyIndices( pQueueFamilyIndices_ )
10289 {
10290 }
10291
10292 BufferCreateInfo( VkBufferCreateInfo const & rhs )
10293 {
10294 memcpy( this, &rhs, sizeof(BufferCreateInfo) );
10295 }
10296
10297 BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs )
10298 {
10299 memcpy( this, &rhs, sizeof(BufferCreateInfo) );
10300 return *this;
10301 }
10302
10303 BufferCreateInfo& setSType( StructureType sType_ )
10304 {
10305 sType = sType_;
10306 return *this;
10307 }
10308
10309 BufferCreateInfo& setPNext( const void* pNext_ )
10310 {
10311 pNext = pNext_;
10312 return *this;
10313 }
10314
10315 BufferCreateInfo& setFlags( BufferCreateFlags flags_ )
10316 {
10317 flags = flags_;
10318 return *this;
10319 }
10320
10321 BufferCreateInfo& setSize( DeviceSize size_ )
10322 {
10323 size = size_;
10324 return *this;
10325 }
10326
10327 BufferCreateInfo& setUsage( BufferUsageFlags usage_ )
10328 {
10329 usage = usage_;
10330 return *this;
10331 }
10332
10333 BufferCreateInfo& setSharingMode( SharingMode sharingMode_ )
10334 {
10335 sharingMode = sharingMode_;
10336 return *this;
10337 }
10338
10339 BufferCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
10340 {
10341 queueFamilyIndexCount = queueFamilyIndexCount_;
10342 return *this;
10343 }
10344
10345 BufferCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
10346 {
10347 pQueueFamilyIndices = pQueueFamilyIndices_;
10348 return *this;
10349 }
10350
10351 operator const VkBufferCreateInfo&() const
10352 {
10353 return *reinterpret_cast<const VkBufferCreateInfo*>(this);
10354 }
10355
10356 bool operator==( BufferCreateInfo const& rhs ) const
10357 {
10358 return ( sType == rhs.sType )
10359 && ( pNext == rhs.pNext )
10360 && ( flags == rhs.flags )
10361 && ( size == rhs.size )
10362 && ( usage == rhs.usage )
10363 && ( sharingMode == rhs.sharingMode )
10364 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
10365 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
10366 }
10367
10368 bool operator!=( BufferCreateInfo const& rhs ) const
10369 {
10370 return !operator==( rhs );
10371 }
10372
10373 private:
10374 StructureType sType;
10375
10376 public:
10377 const void* pNext;
10378 BufferCreateFlags flags;
10379 DeviceSize size;
10380 BufferUsageFlags usage;
10381 SharingMode sharingMode;
10382 uint32_t queueFamilyIndexCount;
10383 const uint32_t* pQueueFamilyIndices;
10384 };
10385 static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
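  // Usage sketch (not generated code): creating an exclusive vertex buffer that can also be a
  // transfer destination. The device handle is a placeholder; Device::createBuffer (enhanced mode,
  // throwing on failure) is declared elsewhere in this header.
  //
  //   vk::BufferCreateInfo bufferInfo = vk::BufferCreateInfo()
  //     .setSize( 65536 )
  //     .setUsage( vk::BufferUsageFlagBits::eVertexBuffer | vk::BufferUsageFlagBits::eTransferDst )
  //     .setSharingMode( vk::SharingMode::eExclusive );
  //   vk::Buffer buffer = device.createBuffer( bufferInfo );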
10386
10387 enum class ShaderStageFlagBits
10388 {
10389 eVertex = VK_SHADER_STAGE_VERTEX_BIT,
10390 eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
10391 eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
10392 eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
10393 eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
10394 eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
10395 eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
10396 eAll = VK_SHADER_STAGE_ALL
10397 };
10398
10399 using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
10400
10401 VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
10402 {
10403 return ShaderStageFlags( bit0 ) | bit1;
10404 }
10405
10406 VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits )
10407 {
10408 return ~( ShaderStageFlags( bits ) );
10409 }
10410
10411 template <> struct FlagTraits<ShaderStageFlagBits>
10412 {
10413 enum
10414 {
10415 allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll)
10416 };
10417 };
10418
10419 struct DescriptorSetLayoutBinding
10420 {
10421 DescriptorSetLayoutBinding( uint32_t binding_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0, ShaderStageFlags stageFlags_ = ShaderStageFlags(), const Sampler* pImmutableSamplers_ = nullptr )
10422 : binding( binding_ )
10423 , descriptorType( descriptorType_ )
10424 , descriptorCount( descriptorCount_ )
10425 , stageFlags( stageFlags_ )
10426 , pImmutableSamplers( pImmutableSamplers_ )
10427 {
10428 }
10429
10430 DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs )
10431 {
10432 memcpy( this, &rhs, sizeof(DescriptorSetLayoutBinding) );
10433 }
10434
10435 DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs )
10436 {
10437 memcpy( this, &rhs, sizeof(DescriptorSetLayoutBinding) );
10438 return *this;
10439 }
10440
10441 DescriptorSetLayoutBinding& setBinding( uint32_t binding_ )
10442 {
10443 binding = binding_;
10444 return *this;
10445 }
10446
10447 DescriptorSetLayoutBinding& setDescriptorType( DescriptorType descriptorType_ )
10448 {
10449 descriptorType = descriptorType_;
10450 return *this;
10451 }
10452
10453 DescriptorSetLayoutBinding& setDescriptorCount( uint32_t descriptorCount_ )
10454 {
10455 descriptorCount = descriptorCount_;
10456 return *this;
10457 }
10458
10459 DescriptorSetLayoutBinding& setStageFlags( ShaderStageFlags stageFlags_ )
10460 {
10461 stageFlags = stageFlags_;
10462 return *this;
10463 }
10464
10465 DescriptorSetLayoutBinding& setPImmutableSamplers( const Sampler* pImmutableSamplers_ )
10466 {
10467 pImmutableSamplers = pImmutableSamplers_;
10468 return *this;
10469 }
10470
10471 operator const VkDescriptorSetLayoutBinding&() const
10472 {
10473 return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>(this);
10474 }
10475
10476 bool operator==( DescriptorSetLayoutBinding const& rhs ) const
10477 {
10478 return ( binding == rhs.binding )
10479 && ( descriptorType == rhs.descriptorType )
10480 && ( descriptorCount == rhs.descriptorCount )
10481 && ( stageFlags == rhs.stageFlags )
10482 && ( pImmutableSamplers == rhs.pImmutableSamplers );
10483 }
10484
10485 bool operator!=( DescriptorSetLayoutBinding const& rhs ) const
10486 {
10487 return !operator==( rhs );
10488 }
10489
10490 uint32_t binding;
10491 DescriptorType descriptorType;
10492 uint32_t descriptorCount;
10493 ShaderStageFlags stageFlags;
10494 const Sampler* pImmutableSamplers;
10495 };
10496 static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
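  // Usage sketch (not generated code): binding 0 as a single uniform buffer visible to the vertex
  // and fragment stages; pImmutableSamplers stays null because no samplers are involved.
  //
  //   vk::DescriptorSetLayoutBinding uboBinding = vk::DescriptorSetLayoutBinding()
  //     .setBinding( 0 )
  //     .setDescriptorType( vk::DescriptorType::eUniformBuffer )
  //     .setDescriptorCount( 1 )
  //     .setStageFlags( vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment );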
10497
10498 struct DescriptorSetLayoutCreateInfo
10499 {
10500 DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = DescriptorSetLayoutCreateFlags(), uint32_t bindingCount_ = 0, const DescriptorSetLayoutBinding* pBindings_ = nullptr )
10501 : sType( StructureType::eDescriptorSetLayoutCreateInfo )
10502 , pNext( nullptr )
10503 , flags( flags_ )
10504 , bindingCount( bindingCount_ )
10505 , pBindings( pBindings_ )
10506 {
10507 }
10508
10509 DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs )
10510 {
10511 memcpy( this, &rhs, sizeof(DescriptorSetLayoutCreateInfo) );
10512 }
10513
10514 DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs )
10515 {
10516 memcpy( this, &rhs, sizeof(DescriptorSetLayoutCreateInfo) );
10517 return *this;
10518 }
10519
10520 DescriptorSetLayoutCreateInfo& setSType( StructureType sType_ )
10521 {
10522 sType = sType_;
10523 return *this;
10524 }
10525
10526 DescriptorSetLayoutCreateInfo& setPNext( const void* pNext_ )
10527 {
10528 pNext = pNext_;
10529 return *this;
10530 }
10531
10532 DescriptorSetLayoutCreateInfo& setFlags( DescriptorSetLayoutCreateFlags flags_ )
10533 {
10534 flags = flags_;
10535 return *this;
10536 }
10537
10538 DescriptorSetLayoutCreateInfo& setBindingCount( uint32_t bindingCount_ )
10539 {
10540 bindingCount = bindingCount_;
10541 return *this;
10542 }
10543
10544 DescriptorSetLayoutCreateInfo& setPBindings( const DescriptorSetLayoutBinding* pBindings_ )
10545 {
10546 pBindings = pBindings_;
10547 return *this;
10548 }
10549
10550 operator const VkDescriptorSetLayoutCreateInfo&() const
10551 {
10552 return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>(this);
10553 }
10554
10555 bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const
10556 {
10557 return ( sType == rhs.sType )
10558 && ( pNext == rhs.pNext )
10559 && ( flags == rhs.flags )
10560 && ( bindingCount == rhs.bindingCount )
10561 && ( pBindings == rhs.pBindings );
10562 }
10563
10564 bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const
10565 {
10566 return !operator==( rhs );
10567 }
10568
10569 private:
10570 StructureType sType;
10571
10572 public:
10573 const void* pNext;
10574 DescriptorSetLayoutCreateFlags flags;
10575 uint32_t bindingCount;
10576 const DescriptorSetLayoutBinding* pBindings;
10577 };
10578 static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
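  // Usage sketch (not generated code): wrapping the binding from the previous example into a
  // layout. The device handle is a placeholder; Device::createDescriptorSetLayout is declared
  // elsewhere in this header.
  //
  //   vk::DescriptorSetLayoutCreateInfo layoutInfo = vk::DescriptorSetLayoutCreateInfo()
  //     .setBindingCount( 1 )
  //     .setPBindings( &uboBinding );
  //   vk::DescriptorSetLayout setLayout = device.createDescriptorSetLayout( layoutInfo );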
10579
10580 struct PipelineShaderStageCreateInfo
10581 {
10582 PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = PipelineShaderStageCreateFlags(), ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex, ShaderModule module_ = ShaderModule(), const char* pName_ = nullptr, const SpecializationInfo* pSpecializationInfo_ = nullptr )
10583 : sType( StructureType::ePipelineShaderStageCreateInfo )
10584 , pNext( nullptr )
10585 , flags( flags_ )
10586 , stage( stage_ )
10587 , module( module_ )
10588 , pName( pName_ )
10589 , pSpecializationInfo( pSpecializationInfo_ )
10590 {
10591 }
10592
10593 PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs )
10594 {
10595 memcpy( this, &rhs, sizeof(PipelineShaderStageCreateInfo) );
10596 }
10597
10598 PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs )
10599 {
10600 memcpy( this, &rhs, sizeof(PipelineShaderStageCreateInfo) );
10601 return *this;
10602 }
10603
10604 PipelineShaderStageCreateInfo& setSType( StructureType sType_ )
10605 {
10606 sType = sType_;
10607 return *this;
10608 }
10609
10610 PipelineShaderStageCreateInfo& setPNext( const void* pNext_ )
10611 {
10612 pNext = pNext_;
10613 return *this;
10614 }
10615
10616 PipelineShaderStageCreateInfo& setFlags( PipelineShaderStageCreateFlags flags_ )
10617 {
10618 flags = flags_;
10619 return *this;
10620 }
10621
10622 PipelineShaderStageCreateInfo& setStage( ShaderStageFlagBits stage_ )
10623 {
10624 stage = stage_;
10625 return *this;
10626 }
10627
10628 PipelineShaderStageCreateInfo& setModule( ShaderModule module_ )
10629 {
10630 module = module_;
10631 return *this;
10632 }
10633
10634 PipelineShaderStageCreateInfo& setPName( const char* pName_ )
10635 {
10636 pName = pName_;
10637 return *this;
10638 }
10639
10640 PipelineShaderStageCreateInfo& setPSpecializationInfo( const SpecializationInfo* pSpecializationInfo_ )
10641 {
10642 pSpecializationInfo = pSpecializationInfo_;
10643 return *this;
10644 }
10645
10646 operator const VkPipelineShaderStageCreateInfo&() const
10647 {
10648 return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>(this);
10649 }
10650
10651 bool operator==( PipelineShaderStageCreateInfo const& rhs ) const
10652 {
10653 return ( sType == rhs.sType )
10654 && ( pNext == rhs.pNext )
10655 && ( flags == rhs.flags )
10656 && ( stage == rhs.stage )
10657 && ( module == rhs.module )
10658 && ( pName == rhs.pName )
10659 && ( pSpecializationInfo == rhs.pSpecializationInfo );
10660 }
10661
10662 bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const
10663 {
10664 return !operator==( rhs );
10665 }
10666
10667 private:
10668 StructureType sType;
10669
10670 public:
10671 const void* pNext;
10672 PipelineShaderStageCreateFlags flags;
10673 ShaderStageFlagBits stage;
10674 ShaderModule module;
10675 const char* pName;
10676 const SpecializationInfo* pSpecializationInfo;
10677 };
10678 static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
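  // Usage sketch (not generated code): describing a compute shader stage. The shaderModule handle
  // is a placeholder created elsewhere from SPIR-V; "main" is the usual entry point name.
  //
  //   vk::PipelineShaderStageCreateInfo computeStage = vk::PipelineShaderStageCreateInfo()
  //     .setStage( vk::ShaderStageFlagBits::eCompute )
  //     .setModule( shaderModule )
  //     .setPName( "main" );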
10679
10680 struct PushConstantRange
10681 {
10682 PushConstantRange( ShaderStageFlags stageFlags_ = ShaderStageFlags(), uint32_t offset_ = 0, uint32_t size_ = 0 )
10683 : stageFlags( stageFlags_ )
10684 , offset( offset_ )
10685 , size( size_ )
10686 {
10687 }
10688
10689 PushConstantRange( VkPushConstantRange const & rhs )
10690 {
10691 memcpy( this, &rhs, sizeof(PushConstantRange) );
10692 }
10693
10694 PushConstantRange& operator=( VkPushConstantRange const & rhs )
10695 {
10696 memcpy( this, &rhs, sizeof(PushConstantRange) );
10697 return *this;
10698 }
10699
10700 PushConstantRange& setStageFlags( ShaderStageFlags stageFlags_ )
10701 {
10702 stageFlags = stageFlags_;
10703 return *this;
10704 }
10705
10706 PushConstantRange& setOffset( uint32_t offset_ )
10707 {
10708 offset = offset_;
10709 return *this;
10710 }
10711
10712 PushConstantRange& setSize( uint32_t size_ )
10713 {
10714 size = size_;
10715 return *this;
10716 }
10717
10718 operator const VkPushConstantRange&() const
10719 {
10720 return *reinterpret_cast<const VkPushConstantRange*>(this);
10721 }
10722
10723 bool operator==( PushConstantRange const& rhs ) const
10724 {
10725 return ( stageFlags == rhs.stageFlags )
10726 && ( offset == rhs.offset )
10727 && ( size == rhs.size );
10728 }
10729
10730 bool operator!=( PushConstantRange const& rhs ) const
10731 {
10732 return !operator==( rhs );
10733 }
10734
10735 ShaderStageFlags stageFlags;
10736 uint32_t offset;
10737 uint32_t size;
10738 };
10739 static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
10740
10741 struct PipelineLayoutCreateInfo
10742 {
10743 PipelineLayoutCreateInfo( PipelineLayoutCreateFlags flags_ = PipelineLayoutCreateFlags(), uint32_t setLayoutCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr, uint32_t pushConstantRangeCount_ = 0, const PushConstantRange* pPushConstantRanges_ = nullptr )
10744 : sType( StructureType::ePipelineLayoutCreateInfo )
10745 , pNext( nullptr )
10746 , flags( flags_ )
10747 , setLayoutCount( setLayoutCount_ )
10748 , pSetLayouts( pSetLayouts_ )
10749 , pushConstantRangeCount( pushConstantRangeCount_ )
10750 , pPushConstantRanges( pPushConstantRanges_ )
10751 {
10752 }
10753
10754 PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs )
10755 {
10756 memcpy( this, &rhs, sizeof(PipelineLayoutCreateInfo) );
10757 }
10758
10759 PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs )
10760 {
10761 memcpy( this, &rhs, sizeof(PipelineLayoutCreateInfo) );
10762 return *this;
10763 }
10764
10765 PipelineLayoutCreateInfo& setSType( StructureType sType_ )
10766 {
10767 sType = sType_;
10768 return *this;
10769 }
10770
10771 PipelineLayoutCreateInfo& setPNext( const void* pNext_ )
10772 {
10773 pNext = pNext_;
10774 return *this;
10775 }
10776
10777 PipelineLayoutCreateInfo& setFlags( PipelineLayoutCreateFlags flags_ )
10778 {
10779 flags = flags_;
10780 return *this;
10781 }
10782
10783 PipelineLayoutCreateInfo& setSetLayoutCount( uint32_t setLayoutCount_ )
10784 {
10785 setLayoutCount = setLayoutCount_;
10786 return *this;
10787 }
10788
10789 PipelineLayoutCreateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
10790 {
10791 pSetLayouts = pSetLayouts_;
10792 return *this;
10793 }
10794
10795 PipelineLayoutCreateInfo& setPushConstantRangeCount( uint32_t pushConstantRangeCount_ )
10796 {
10797 pushConstantRangeCount = pushConstantRangeCount_;
10798 return *this;
10799 }
10800
10801 PipelineLayoutCreateInfo& setPPushConstantRanges( const PushConstantRange* pPushConstantRanges_ )
10802 {
10803 pPushConstantRanges = pPushConstantRanges_;
10804 return *this;
10805 }
10806
10807 operator const VkPipelineLayoutCreateInfo&() const
10808 {
10809 return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>(this);
10810 }
10811
10812 bool operator==( PipelineLayoutCreateInfo const& rhs ) const
10813 {
10814 return ( sType == rhs.sType )
10815 && ( pNext == rhs.pNext )
10816 && ( flags == rhs.flags )
10817 && ( setLayoutCount == rhs.setLayoutCount )
10818 && ( pSetLayouts == rhs.pSetLayouts )
10819 && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
10820 && ( pPushConstantRanges == rhs.pPushConstantRanges );
10821 }
10822
10823 bool operator!=( PipelineLayoutCreateInfo const& rhs ) const
10824 {
10825 return !operator==( rhs );
10826 }
10827
10828 private:
10829 StructureType sType;
10830
10831 public:
10832 const void* pNext;
10833 PipelineLayoutCreateFlags flags;
10834 uint32_t setLayoutCount;
10835 const DescriptorSetLayout* pSetLayouts;
10836 uint32_t pushConstantRangeCount;
10837 const PushConstantRange* pPushConstantRanges;
10838 };
10839 static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
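  // Usage sketch (not generated code): a pipeline layout with one descriptor set layout and one
  // push constant range. setLayout comes from the DescriptorSetLayoutCreateInfo example above;
  // the device handle is a placeholder.
  //
  //   vk::PushConstantRange pushRange = vk::PushConstantRange()
  //     .setStageFlags( vk::ShaderStageFlagBits::eCompute )
  //     .setOffset( 0 )
  //     .setSize( 16 );
  //   vk::PipelineLayoutCreateInfo layoutCreateInfo = vk::PipelineLayoutCreateInfo()
  //     .setSetLayoutCount( 1 )
  //     .setPSetLayouts( &setLayout )
  //     .setPushConstantRangeCount( 1 )
  //     .setPPushConstantRanges( &pushRange );
  //   vk::PipelineLayout pipelineLayout = device.createPipelineLayout( layoutCreateInfo );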
10840
10841 enum class ImageUsageFlagBits
10842 {
10843 eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
10844 eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
10845 eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
10846 eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
10847 eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
10848 eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
10849 eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
10850 eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
10851 };
10852
10853 using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
10854
10855 VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
10856 {
10857 return ImageUsageFlags( bit0 ) | bit1;
10858 }
10859
10860 VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits )
10861 {
10862 return ~( ImageUsageFlags( bits ) );
10863 }
10864
10865 template <> struct FlagTraits<ImageUsageFlagBits>
10866 {
10867 enum
10868 {
10869 allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment)
10870 };
10871 };
10872
10873 enum class ImageCreateFlagBits
10874 {
10875 eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
10876 eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
10877 eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
10878 eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
10879 eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
10880 e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR
10881 };
10882
10883 using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
10884
10885 VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
10886 {
10887 return ImageCreateFlags( bit0 ) | bit1;
10888 }
10889
10890 VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits )
10891 {
10892 return ~( ImageCreateFlags( bits ) );
10893 }
10894
10895 template <> struct FlagTraits<ImageCreateFlagBits>
10896 {
10897 enum
10898 {
10899 allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::e2DArrayCompatibleKHR)
10900 };
10901 };
10902
10903 struct PhysicalDeviceImageFormatInfo2KHR
10904 {
10905 PhysicalDeviceImageFormatInfo2KHR( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), ImageCreateFlags flags_ = ImageCreateFlags() )
10906 : sType( StructureType::ePhysicalDeviceImageFormatInfo2KHR )
10907 , pNext( nullptr )
10908 , format( format_ )
10909 , type( type_ )
10910 , tiling( tiling_ )
10911 , usage( usage_ )
10912 , flags( flags_ )
10913 {
10914 }
10915
10916 PhysicalDeviceImageFormatInfo2KHR( VkPhysicalDeviceImageFormatInfo2KHR const & rhs )
10917 {
10918 memcpy( this, &rhs, sizeof(PhysicalDeviceImageFormatInfo2KHR) );
10919 }
10920
10921 PhysicalDeviceImageFormatInfo2KHR& operator=( VkPhysicalDeviceImageFormatInfo2KHR const & rhs )
10922 {
10923 memcpy( this, &rhs, sizeof(PhysicalDeviceImageFormatInfo2KHR) );
10924 return *this;
10925 }
10926
10927 PhysicalDeviceImageFormatInfo2KHR& setSType( StructureType sType_ )
10928 {
10929 sType = sType_;
10930 return *this;
10931 }
10932
10933 PhysicalDeviceImageFormatInfo2KHR& setPNext( const void* pNext_ )
10934 {
10935 pNext = pNext_;
10936 return *this;
10937 }
10938
10939 PhysicalDeviceImageFormatInfo2KHR& setFormat( Format format_ )
10940 {
10941 format = format_;
10942 return *this;
10943 }
10944
10945 PhysicalDeviceImageFormatInfo2KHR& setType( ImageType type_ )
10946 {
10947 type = type_;
10948 return *this;
10949 }
10950
10951 PhysicalDeviceImageFormatInfo2KHR& setTiling( ImageTiling tiling_ )
10952 {
10953 tiling = tiling_;
10954 return *this;
10955 }
10956
10957 PhysicalDeviceImageFormatInfo2KHR& setUsage( ImageUsageFlags usage_ )
10958 {
10959 usage = usage_;
10960 return *this;
10961 }
10962
10963 PhysicalDeviceImageFormatInfo2KHR& setFlags( ImageCreateFlags flags_ )
10964 {
10965 flags = flags_;
10966 return *this;
10967 }
10968
10969 operator const VkPhysicalDeviceImageFormatInfo2KHR&() const
10970 {
10971 return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>(this);
10972 }
10973
10974 bool operator==( PhysicalDeviceImageFormatInfo2KHR const& rhs ) const
10975 {
10976 return ( sType == rhs.sType )
10977 && ( pNext == rhs.pNext )
10978 && ( format == rhs.format )
10979 && ( type == rhs.type )
10980 && ( tiling == rhs.tiling )
10981 && ( usage == rhs.usage )
10982 && ( flags == rhs.flags );
10983 }
10984
10985 bool operator!=( PhysicalDeviceImageFormatInfo2KHR const& rhs ) const
10986 {
10987 return !operator==( rhs );
10988 }
10989
10990 private:
10991 StructureType sType;
10992
10993 public:
10994 const void* pNext;
10995 Format format;
10996 ImageType type;
10997 ImageTiling tiling;
10998 ImageUsageFlags usage;
10999 ImageCreateFlags flags;
11000 };
11001 static_assert( sizeof( PhysicalDeviceImageFormatInfo2KHR ) == sizeof( VkPhysicalDeviceImageFormatInfo2KHR ), "struct and wrapper have different size!" );
11002
11003 enum class PipelineCreateFlagBits
11004 {
11005 eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
11006 eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
11007 eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT
11008 };
11009
11010 using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
11011
11012 VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
11013 {
11014 return PipelineCreateFlags( bit0 ) | bit1;
11015 }
11016
11017 VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
11018 {
11019 return ~( PipelineCreateFlags( bits ) );
11020 }
11021
11022 template <> struct FlagTraits<PipelineCreateFlagBits>
11023 {
11024 enum
11025 {
11026 allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative)
11027 };
11028 };
11029
11030  struct ComputePipelineCreateInfo
11031 {
11032 ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), PipelineLayout layout_ = PipelineLayout(), Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
11033 : sType( StructureType::eComputePipelineCreateInfo )
11034 , pNext( nullptr )
11035 , flags( flags_ )
11036 , stage( stage_ )
11037 , layout( layout_ )
11038 , basePipelineHandle( basePipelineHandle_ )
11039 , basePipelineIndex( basePipelineIndex_ )
11040 {
11041 }
11042
11043 ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs )
11044 {
11045 memcpy( this, &rhs, sizeof(ComputePipelineCreateInfo) );
11046 }
11047
11048 ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs )
11049 {
11050 memcpy( this, &rhs, sizeof(ComputePipelineCreateInfo) );
11051 return *this;
11052 }
11053
11054 ComputePipelineCreateInfo& setSType( StructureType sType_ )
11055 {
11056 sType = sType_;
11057 return *this;
11058 }
11059
11060 ComputePipelineCreateInfo& setPNext( const void* pNext_ )
11061 {
11062 pNext = pNext_;
11063 return *this;
11064 }
11065
11066 ComputePipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
11067 {
11068 flags = flags_;
11069 return *this;
11070 }
11071
11072 ComputePipelineCreateInfo& setStage( PipelineShaderStageCreateInfo stage_ )
11073 {
11074 stage = stage_;
11075 return *this;
11076 }
11077
11078 ComputePipelineCreateInfo& setLayout( PipelineLayout layout_ )
11079 {
11080 layout = layout_;
11081 return *this;
11082 }
11083
11084 ComputePipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
11085 {
11086 basePipelineHandle = basePipelineHandle_;
11087 return *this;
11088 }
11089
11090 ComputePipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
11091 {
11092 basePipelineIndex = basePipelineIndex_;
11093 return *this;
11094 }
11095
11096 operator const VkComputePipelineCreateInfo&() const
11097 {
11098 return *reinterpret_cast<const VkComputePipelineCreateInfo*>(this);
11099 }
11100
11101 bool operator==( ComputePipelineCreateInfo const& rhs ) const
11102 {
11103 return ( sType == rhs.sType )
11104 && ( pNext == rhs.pNext )
11105 && ( flags == rhs.flags )
11106 && ( stage == rhs.stage )
11107 && ( layout == rhs.layout )
11108 && ( basePipelineHandle == rhs.basePipelineHandle )
11109 && ( basePipelineIndex == rhs.basePipelineIndex );
11110 }
11111
11112 bool operator!=( ComputePipelineCreateInfo const& rhs ) const
11113 {
11114 return !operator==( rhs );
11115 }
11116
11117 private:
11118 StructureType sType;
11119
11120 public:
11121 const void* pNext;
11122 PipelineCreateFlags flags;
11123 PipelineShaderStageCreateInfo stage;
11124 PipelineLayout layout;
11125 Pipeline basePipelineHandle;
11126 int32_t basePipelineIndex;
11127 };
11128 static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
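  // Usage sketch (illustrative comment only): filling a compute pipeline description with
  // the chained setters and handing it to the enhanced-mode creation call; 'device',
  // 'pipelineCache', 'shaderStageInfo' and 'pipelineLayout' are assumed to have been
  // created elsewhere.
  //
  //   vk::ComputePipelineCreateInfo computeInfo = vk::ComputePipelineCreateInfo()
  //     .setStage( shaderStageInfo )
  //     .setLayout( pipelineLayout );
  //   vk::Pipeline computePipeline = device.createComputePipeline( pipelineCache, computeInfo );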
11129
11130 enum class ColorComponentFlagBits
11131 {
11132 eR = VK_COLOR_COMPONENT_R_BIT,
11133 eG = VK_COLOR_COMPONENT_G_BIT,
11134 eB = VK_COLOR_COMPONENT_B_BIT,
11135 eA = VK_COLOR_COMPONENT_A_BIT
11136 };
11137
11138 using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
11139
11140  VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
11141  {
11142 return ColorComponentFlags( bit0 ) | bit1;
11143 }
11144
11145  VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits )
11146 {
11147 return ~( ColorComponentFlags( bits ) );
11148 }
11149
11150 template <> struct FlagTraits<ColorComponentFlagBits>
11151 {
11152 enum
11153 {
11154 allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
11155 };
11156 };
11157
11158  struct PipelineColorBlendAttachmentState
11159 {
11160 PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd, ColorComponentFlags colorWriteMask_ = ColorComponentFlags() )
11161 : blendEnable( blendEnable_ )
11162 , srcColorBlendFactor( srcColorBlendFactor_ )
11163 , dstColorBlendFactor( dstColorBlendFactor_ )
11164 , colorBlendOp( colorBlendOp_ )
11165 , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
11166 , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
11167 , alphaBlendOp( alphaBlendOp_ )
11168 , colorWriteMask( colorWriteMask_ )
11169 {
11170 }
11171
11172 PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs )
11173 {
11174 memcpy( this, &rhs, sizeof(PipelineColorBlendAttachmentState) );
11175 }
11176
11177 PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs )
11178 {
11179 memcpy( this, &rhs, sizeof(PipelineColorBlendAttachmentState) );
11180 return *this;
11181 }
11182
11183 PipelineColorBlendAttachmentState& setBlendEnable( Bool32 blendEnable_ )
11184 {
11185 blendEnable = blendEnable_;
11186 return *this;
11187 }
11188
11189 PipelineColorBlendAttachmentState& setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ )
11190 {
11191 srcColorBlendFactor = srcColorBlendFactor_;
11192 return *this;
11193 }
11194
11195 PipelineColorBlendAttachmentState& setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ )
11196 {
11197 dstColorBlendFactor = dstColorBlendFactor_;
11198 return *this;
11199 }
11200
11201 PipelineColorBlendAttachmentState& setColorBlendOp( BlendOp colorBlendOp_ )
11202 {
11203 colorBlendOp = colorBlendOp_;
11204 return *this;
11205 }
11206
11207 PipelineColorBlendAttachmentState& setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ )
11208 {
11209 srcAlphaBlendFactor = srcAlphaBlendFactor_;
11210 return *this;
11211 }
11212
11213 PipelineColorBlendAttachmentState& setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ )
11214 {
11215 dstAlphaBlendFactor = dstAlphaBlendFactor_;
11216 return *this;
11217 }
11218
11219 PipelineColorBlendAttachmentState& setAlphaBlendOp( BlendOp alphaBlendOp_ )
11220 {
11221 alphaBlendOp = alphaBlendOp_;
11222 return *this;
11223 }
11224
11225 PipelineColorBlendAttachmentState& setColorWriteMask( ColorComponentFlags colorWriteMask_ )
11226 {
11227 colorWriteMask = colorWriteMask_;
11228 return *this;
11229 }
11230
11231 operator const VkPipelineColorBlendAttachmentState&() const
11232 {
11233 return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>(this);
11234 }
11235
11236 bool operator==( PipelineColorBlendAttachmentState const& rhs ) const
11237 {
11238 return ( blendEnable == rhs.blendEnable )
11239 && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
11240 && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
11241 && ( colorBlendOp == rhs.colorBlendOp )
11242 && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
11243 && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
11244 && ( alphaBlendOp == rhs.alphaBlendOp )
11245 && ( colorWriteMask == rhs.colorWriteMask );
11246 }
11247
11248 bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const
11249 {
11250 return !operator==( rhs );
11251 }
11252
11253 Bool32 blendEnable;
11254 BlendFactor srcColorBlendFactor;
11255 BlendFactor dstColorBlendFactor;
11256 BlendOp colorBlendOp;
11257 BlendFactor srcAlphaBlendFactor;
11258 BlendFactor dstAlphaBlendFactor;
11259 BlendOp alphaBlendOp;
11260 ColorComponentFlags colorWriteMask;
11261 };
11262 static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
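  // Usage sketch (illustrative comment only): a conventional "source over" alpha-blend
  // configuration, with the write mask assembled from the ColorComponentFlagBits
  // operator| overload defined above.
  //
  //   vk::PipelineColorBlendAttachmentState blendAttachment = vk::PipelineColorBlendAttachmentState()
  //     .setBlendEnable( VK_TRUE )
  //     .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
  //     .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
  //     .setColorBlendOp( vk::BlendOp::eAdd )
  //     .setSrcAlphaBlendFactor( vk::BlendFactor::eOne )
  //     .setDstAlphaBlendFactor( vk::BlendFactor::eZero )
  //     .setAlphaBlendOp( vk::BlendOp::eAdd )
  //     .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
  //                       | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );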
11263
11264 struct PipelineColorBlendStateCreateInfo
11265 {
11266 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_ = PipelineColorBlendStateCreateFlags(), Bool32 logicOpEnable_ = 0, LogicOp logicOp_ = LogicOp::eClear, uint32_t attachmentCount_ = 0, const PipelineColorBlendAttachmentState* pAttachments_ = nullptr, std::array<float,4> const& blendConstants_ = { { 0, 0, 0, 0 } } )
11267 : sType( StructureType::ePipelineColorBlendStateCreateInfo )
11268 , pNext( nullptr )
11269 , flags( flags_ )
11270 , logicOpEnable( logicOpEnable_ )
11271 , logicOp( logicOp_ )
11272 , attachmentCount( attachmentCount_ )
11273 , pAttachments( pAttachments_ )
11274 {
11275 memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
11276 }
11277
11278 PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs )
11279 {
11280 memcpy( this, &rhs, sizeof(PipelineColorBlendStateCreateInfo) );
11281 }
11282
11283 PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs )
11284 {
11285 memcpy( this, &rhs, sizeof(PipelineColorBlendStateCreateInfo) );
11286 return *this;
11287 }
11288
11289 PipelineColorBlendStateCreateInfo& setSType( StructureType sType_ )
11290 {
11291 sType = sType_;
11292 return *this;
11293 }
11294
11295 PipelineColorBlendStateCreateInfo& setPNext( const void* pNext_ )
11296 {
11297 pNext = pNext_;
11298 return *this;
11299 }
11300
11301 PipelineColorBlendStateCreateInfo& setFlags( PipelineColorBlendStateCreateFlags flags_ )
11302 {
11303 flags = flags_;
11304 return *this;
11305 }
11306
11307 PipelineColorBlendStateCreateInfo& setLogicOpEnable( Bool32 logicOpEnable_ )
11308 {
11309 logicOpEnable = logicOpEnable_;
11310 return *this;
11311 }
11312
11313 PipelineColorBlendStateCreateInfo& setLogicOp( LogicOp logicOp_ )
11314 {
11315 logicOp = logicOp_;
11316 return *this;
11317 }
11318
11319 PipelineColorBlendStateCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
11320 {
11321 attachmentCount = attachmentCount_;
11322 return *this;
11323 }
11324
11325 PipelineColorBlendStateCreateInfo& setPAttachments( const PipelineColorBlendAttachmentState* pAttachments_ )
11326 {
11327 pAttachments = pAttachments_;
11328 return *this;
11329 }
11330
11331 PipelineColorBlendStateCreateInfo& setBlendConstants( std::array<float,4> blendConstants_ )
11332 {
11333 memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
11334 return *this;
11335 }
11336
11337 operator const VkPipelineColorBlendStateCreateInfo&() const
11338 {
11339 return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>(this);
11340 }
11341
11342 bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const
11343 {
11344 return ( sType == rhs.sType )
11345 && ( pNext == rhs.pNext )
11346 && ( flags == rhs.flags )
11347 && ( logicOpEnable == rhs.logicOpEnable )
11348 && ( logicOp == rhs.logicOp )
11349 && ( attachmentCount == rhs.attachmentCount )
11350 && ( pAttachments == rhs.pAttachments )
11351 && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
11352 }
11353
11354 bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const
11355 {
11356 return !operator==( rhs );
11357 }
11358
11359 private:
11360 StructureType sType;
11361
11362 public:
11363 const void* pNext;
11364 PipelineColorBlendStateCreateFlags flags;
11365 Bool32 logicOpEnable;
11366 LogicOp logicOp;
11367 uint32_t attachmentCount;
11368 const PipelineColorBlendAttachmentState* pAttachments;
11369 float blendConstants[4];
11370 };
11371 static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
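  // Usage sketch (illustrative comment only): pointing the blend state at a single
  // attachment description; 'blendAttachment' is assumed to be a filled-in
  // vk::PipelineColorBlendAttachmentState such as the one sketched above.
  //
  //   vk::PipelineColorBlendStateCreateInfo colorBlendState = vk::PipelineColorBlendStateCreateInfo()
  //     .setAttachmentCount( 1 )
  //     .setPAttachments( &blendAttachment );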
11372
11373 enum class FenceCreateFlagBits
11374 {
11375 eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
11376 };
11377
11378 using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
11379
11380  VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
11381  {
11382 return FenceCreateFlags( bit0 ) | bit1;
11383 }
11384
11385  VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
11386 {
11387 return ~( FenceCreateFlags( bits ) );
11388 }
11389
11390 template <> struct FlagTraits<FenceCreateFlagBits>
11391 {
11392 enum
11393 {
11394 allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
11395 };
11396 };
11397
11398  struct FenceCreateInfo
11399 {
11400 FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
11401 : sType( StructureType::eFenceCreateInfo )
11402 , pNext( nullptr )
11403 , flags( flags_ )
11404 {
11405 }
11406
11407 FenceCreateInfo( VkFenceCreateInfo const & rhs )
11408 {
11409 memcpy( this, &rhs, sizeof(FenceCreateInfo) );
11410 }
11411
11412 FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs )
11413 {
11414 memcpy( this, &rhs, sizeof(FenceCreateInfo) );
11415 return *this;
11416 }
11417
11418 FenceCreateInfo& setSType( StructureType sType_ )
11419 {
11420 sType = sType_;
11421 return *this;
11422 }
11423
11424 FenceCreateInfo& setPNext( const void* pNext_ )
11425 {
11426 pNext = pNext_;
11427 return *this;
11428 }
11429
11430 FenceCreateInfo& setFlags( FenceCreateFlags flags_ )
11431 {
11432 flags = flags_;
11433 return *this;
11434 }
11435
11436 operator const VkFenceCreateInfo&() const
11437 {
11438 return *reinterpret_cast<const VkFenceCreateInfo*>(this);
11439 }
11440
11441 bool operator==( FenceCreateInfo const& rhs ) const
11442 {
11443 return ( sType == rhs.sType )
11444 && ( pNext == rhs.pNext )
11445 && ( flags == rhs.flags );
11446 }
11447
11448 bool operator!=( FenceCreateInfo const& rhs ) const
11449 {
11450 return !operator==( rhs );
11451 }
11452
11453 private:
11454 StructureType sType;
11455
11456 public:
11457 const void* pNext;
11458 FenceCreateFlags flags;
11459 };
11460 static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
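  // Usage sketch (illustrative comment only): creating a fence that starts out signaled,
  // which is convenient for "wait then reset" frame loops; 'device' is assumed to exist.
  //
  //   vk::Fence frameFence = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );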
11461
11462 enum class FormatFeatureFlagBits
11463 {
11464 eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
11465 eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
11466 eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
11467 eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
11468 eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
11469 eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
11470 eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
11471 eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
11472 eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
11473 eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
11474 eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
11475 eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
11476 eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
11477    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
11478 eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR,
11479 eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR
11480  };
11481
11482 using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
11483
11484  VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
11485  {
11486 return FormatFeatureFlags( bit0 ) | bit1;
11487 }
11488
11489  VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits )
11490 {
11491 return ~( FormatFeatureFlags( bits ) );
11492 }
11493
11494 template <> struct FlagTraits<FormatFeatureFlagBits>
11495 {
11496 enum
11497 {
11498      allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eTransferSrcKHR) | VkFlags(FormatFeatureFlagBits::eTransferDstKHR)
11499    };
11500 };
11501
11502  struct FormatProperties
11503 {
11504 operator const VkFormatProperties&() const
11505 {
11506 return *reinterpret_cast<const VkFormatProperties*>(this);
11507 }
11508
11509 bool operator==( FormatProperties const& rhs ) const
11510 {
11511 return ( linearTilingFeatures == rhs.linearTilingFeatures )
11512 && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
11513 && ( bufferFeatures == rhs.bufferFeatures );
11514 }
11515
11516 bool operator!=( FormatProperties const& rhs ) const
11517 {
11518 return !operator==( rhs );
11519 }
11520
11521 FormatFeatureFlags linearTilingFeatures;
11522 FormatFeatureFlags optimalTilingFeatures;
11523 FormatFeatureFlags bufferFeatures;
11524 };
11525 static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
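  // Usage sketch (illustrative comment only): querying the per-format capabilities in
  // enhanced mode and testing one feature bit; 'physicalDevice' is assumed to exist.
  //
  //   vk::FormatProperties depthProps = physicalDevice.getFormatProperties( vk::Format::eD24UnormS8Uint );
  //   if ( depthProps.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment )
  //   {
  //     // the format is usable as an optimal-tiling depth/stencil attachment
  //   }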
11526
11527  struct FormatProperties2KHR
11528 {
11529 operator const VkFormatProperties2KHR&() const
11530 {
11531 return *reinterpret_cast<const VkFormatProperties2KHR*>(this);
11532 }
11533
11534 bool operator==( FormatProperties2KHR const& rhs ) const
11535 {
11536 return ( sType == rhs.sType )
11537 && ( pNext == rhs.pNext )
11538 && ( formatProperties == rhs.formatProperties );
11539 }
11540
11541 bool operator!=( FormatProperties2KHR const& rhs ) const
11542 {
11543 return !operator==( rhs );
11544 }
11545
11546 private:
11547 StructureType sType;
11548
11549 public:
11550 void* pNext;
11551 FormatProperties formatProperties;
11552 };
11553 static_assert( sizeof( FormatProperties2KHR ) == sizeof( VkFormatProperties2KHR ), "struct and wrapper have different size!" );
11554
11555  enum class QueryControlFlagBits
11556 {
11557 ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
11558 };
11559
11560 using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
11561
11562  VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
11563  {
11564 return QueryControlFlags( bit0 ) | bit1;
11565 }
11566
11567  VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits )
11568 {
11569 return ~( QueryControlFlags( bits ) );
11570 }
11571
11572 template <> struct FlagTraits<QueryControlFlagBits>
11573 {
11574 enum
11575 {
11576 allFlags = VkFlags(QueryControlFlagBits::ePrecise)
11577 };
11578 };
11579
11580  enum class QueryResultFlagBits
11581 {
11582 e64 = VK_QUERY_RESULT_64_BIT,
11583 eWait = VK_QUERY_RESULT_WAIT_BIT,
11584 eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
11585 ePartial = VK_QUERY_RESULT_PARTIAL_BIT
11586 };
11587
11588 using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
11589
11590  VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
11591  {
11592 return QueryResultFlags( bit0 ) | bit1;
11593 }
11594
11595  VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits )
11596 {
11597 return ~( QueryResultFlags( bits ) );
11598 }
11599
11600 template <> struct FlagTraits<QueryResultFlagBits>
11601 {
11602 enum
11603 {
11604 allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
11605 };
11606 };
11607
11608  enum class CommandBufferUsageFlagBits
11609 {
11610 eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
11611 eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
11612 eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
11613 };
11614
11615 using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
11616
11617  VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
11618  {
11619 return CommandBufferUsageFlags( bit0 ) | bit1;
11620 }
11621
11622  VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
11623 {
11624 return ~( CommandBufferUsageFlags( bits ) );
11625 }
11626
11627 template <> struct FlagTraits<CommandBufferUsageFlagBits>
11628 {
11629 enum
11630 {
11631 allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
11632 };
11633 };
11634
11635  enum class QueryPipelineStatisticFlagBits
11636 {
11637 eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
11638 eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
11639 eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
11640 eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
11641 eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
11642 eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
11643 eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
11644 eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
11645 eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
11646 eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
11647 eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
11648 };
11649
11650 using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
11651
11652  VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
11653  {
11654 return QueryPipelineStatisticFlags( bit0 ) | bit1;
11655 }
11656
11657  VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
11658 {
11659 return ~( QueryPipelineStatisticFlags( bits ) );
11660 }
11661
11662 template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
11663 {
11664 enum
11665 {
11666 allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
11667 };
11668 };
11669
11670  struct CommandBufferInheritanceInfo
11671 {
11672 CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Framebuffer framebuffer_ = Framebuffer(), Bool32 occlusionQueryEnable_ = 0, QueryControlFlags queryFlags_ = QueryControlFlags(), QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
11673 : sType( StructureType::eCommandBufferInheritanceInfo )
11674 , pNext( nullptr )
11675 , renderPass( renderPass_ )
11676 , subpass( subpass_ )
11677 , framebuffer( framebuffer_ )
11678 , occlusionQueryEnable( occlusionQueryEnable_ )
11679 , queryFlags( queryFlags_ )
11680 , pipelineStatistics( pipelineStatistics_ )
11681 {
11682 }
11683
11684 CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs )
11685 {
11686 memcpy( this, &rhs, sizeof(CommandBufferInheritanceInfo) );
11687 }
11688
11689 CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs )
11690 {
11691 memcpy( this, &rhs, sizeof(CommandBufferInheritanceInfo) );
11692 return *this;
11693 }
11694
11695 CommandBufferInheritanceInfo& setSType( StructureType sType_ )
11696 {
11697 sType = sType_;
11698 return *this;
11699 }
11700
11701 CommandBufferInheritanceInfo& setPNext( const void* pNext_ )
11702 {
11703 pNext = pNext_;
11704 return *this;
11705 }
11706
11707 CommandBufferInheritanceInfo& setRenderPass( RenderPass renderPass_ )
11708 {
11709 renderPass = renderPass_;
11710 return *this;
11711 }
11712
11713 CommandBufferInheritanceInfo& setSubpass( uint32_t subpass_ )
11714 {
11715 subpass = subpass_;
11716 return *this;
11717 }
11718
11719 CommandBufferInheritanceInfo& setFramebuffer( Framebuffer framebuffer_ )
11720 {
11721 framebuffer = framebuffer_;
11722 return *this;
11723 }
11724
11725 CommandBufferInheritanceInfo& setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ )
11726 {
11727 occlusionQueryEnable = occlusionQueryEnable_;
11728 return *this;
11729 }
11730
11731 CommandBufferInheritanceInfo& setQueryFlags( QueryControlFlags queryFlags_ )
11732 {
11733 queryFlags = queryFlags_;
11734 return *this;
11735 }
11736
11737 CommandBufferInheritanceInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
11738 {
11739 pipelineStatistics = pipelineStatistics_;
11740 return *this;
11741 }
11742
11743 operator const VkCommandBufferInheritanceInfo&() const
11744 {
11745 return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>(this);
11746 }
11747
11748 bool operator==( CommandBufferInheritanceInfo const& rhs ) const
11749 {
11750 return ( sType == rhs.sType )
11751 && ( pNext == rhs.pNext )
11752 && ( renderPass == rhs.renderPass )
11753 && ( subpass == rhs.subpass )
11754 && ( framebuffer == rhs.framebuffer )
11755 && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
11756 && ( queryFlags == rhs.queryFlags )
11757 && ( pipelineStatistics == rhs.pipelineStatistics );
11758 }
11759
11760 bool operator!=( CommandBufferInheritanceInfo const& rhs ) const
11761 {
11762 return !operator==( rhs );
11763 }
11764
11765 private:
11766 StructureType sType;
11767
11768 public:
11769 const void* pNext;
11770 RenderPass renderPass;
11771 uint32_t subpass;
11772 Framebuffer framebuffer;
11773 Bool32 occlusionQueryEnable;
11774 QueryControlFlags queryFlags;
11775 QueryPipelineStatisticFlags pipelineStatistics;
11776 };
11777 static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
11778
11779 struct CommandBufferBeginInfo
11780 {
11781 CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = CommandBufferUsageFlags(), const CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr )
11782 : sType( StructureType::eCommandBufferBeginInfo )
11783 , pNext( nullptr )
11784 , flags( flags_ )
11785 , pInheritanceInfo( pInheritanceInfo_ )
11786 {
11787 }
11788
11789 CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs )
11790 {
11791 memcpy( this, &rhs, sizeof(CommandBufferBeginInfo) );
11792 }
11793
11794 CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs )
11795 {
11796 memcpy( this, &rhs, sizeof(CommandBufferBeginInfo) );
11797 return *this;
11798 }
11799
11800 CommandBufferBeginInfo& setSType( StructureType sType_ )
11801 {
11802 sType = sType_;
11803 return *this;
11804 }
11805
11806 CommandBufferBeginInfo& setPNext( const void* pNext_ )
11807 {
11808 pNext = pNext_;
11809 return *this;
11810 }
11811
11812 CommandBufferBeginInfo& setFlags( CommandBufferUsageFlags flags_ )
11813 {
11814 flags = flags_;
11815 return *this;
11816 }
11817
11818 CommandBufferBeginInfo& setPInheritanceInfo( const CommandBufferInheritanceInfo* pInheritanceInfo_ )
11819 {
11820 pInheritanceInfo = pInheritanceInfo_;
11821 return *this;
11822 }
11823
11824 operator const VkCommandBufferBeginInfo&() const
11825 {
11826 return *reinterpret_cast<const VkCommandBufferBeginInfo*>(this);
11827 }
11828
11829 bool operator==( CommandBufferBeginInfo const& rhs ) const
11830 {
11831 return ( sType == rhs.sType )
11832 && ( pNext == rhs.pNext )
11833 && ( flags == rhs.flags )
11834 && ( pInheritanceInfo == rhs.pInheritanceInfo );
11835 }
11836
11837 bool operator!=( CommandBufferBeginInfo const& rhs ) const
11838 {
11839 return !operator==( rhs );
11840 }
11841
11842 private:
11843 StructureType sType;
11844
11845 public:
11846 const void* pNext;
11847 CommandBufferUsageFlags flags;
11848 const CommandBufferInheritanceInfo* pInheritanceInfo;
11849 };
11850 static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
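  // Usage sketch (illustrative comment only): beginning a primary command buffer that
  // will be submitted exactly once; 'commandBuffer' is assumed to have been allocated.
  //
  //   commandBuffer.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );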
11851
11852 struct QueryPoolCreateInfo
11853 {
11854 QueryPoolCreateInfo( QueryPoolCreateFlags flags_ = QueryPoolCreateFlags(), QueryType queryType_ = QueryType::eOcclusion, uint32_t queryCount_ = 0, QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
11855 : sType( StructureType::eQueryPoolCreateInfo )
11856 , pNext( nullptr )
11857 , flags( flags_ )
11858 , queryType( queryType_ )
11859 , queryCount( queryCount_ )
11860 , pipelineStatistics( pipelineStatistics_ )
11861 {
11862 }
11863
11864 QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs )
11865 {
11866 memcpy( this, &rhs, sizeof(QueryPoolCreateInfo) );
11867 }
11868
11869 QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs )
11870 {
11871 memcpy( this, &rhs, sizeof(QueryPoolCreateInfo) );
11872 return *this;
11873 }
11874
11875 QueryPoolCreateInfo& setSType( StructureType sType_ )
11876 {
11877 sType = sType_;
11878 return *this;
11879 }
11880
11881 QueryPoolCreateInfo& setPNext( const void* pNext_ )
11882 {
11883 pNext = pNext_;
11884 return *this;
11885 }
11886
11887 QueryPoolCreateInfo& setFlags( QueryPoolCreateFlags flags_ )
11888 {
11889 flags = flags_;
11890 return *this;
11891 }
11892
11893 QueryPoolCreateInfo& setQueryType( QueryType queryType_ )
11894 {
11895 queryType = queryType_;
11896 return *this;
11897 }
11898
11899 QueryPoolCreateInfo& setQueryCount( uint32_t queryCount_ )
11900 {
11901 queryCount = queryCount_;
11902 return *this;
11903 }
11904
11905 QueryPoolCreateInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
11906 {
11907 pipelineStatistics = pipelineStatistics_;
11908 return *this;
11909 }
11910
11911 operator const VkQueryPoolCreateInfo&() const
11912 {
11913 return *reinterpret_cast<const VkQueryPoolCreateInfo*>(this);
11914 }
11915
11916 bool operator==( QueryPoolCreateInfo const& rhs ) const
11917 {
11918 return ( sType == rhs.sType )
11919 && ( pNext == rhs.pNext )
11920 && ( flags == rhs.flags )
11921 && ( queryType == rhs.queryType )
11922 && ( queryCount == rhs.queryCount )
11923 && ( pipelineStatistics == rhs.pipelineStatistics );
11924 }
11925
11926 bool operator!=( QueryPoolCreateInfo const& rhs ) const
11927 {
11928 return !operator==( rhs );
11929 }
11930
11931 private:
11932 StructureType sType;
11933
11934 public:
11935 const void* pNext;
11936 QueryPoolCreateFlags flags;
11937 QueryType queryType;
11938 uint32_t queryCount;
11939 QueryPipelineStatisticFlags pipelineStatistics;
11940 };
11941 static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
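  // Usage sketch (illustrative comment only): a small timestamp query pool; 'device' is
  // assumed to exist, and pipelineStatistics is only meaningful for
  // QueryType::ePipelineStatistics pools.
  //
  //   vk::QueryPoolCreateInfo queryPoolInfo = vk::QueryPoolCreateInfo()
  //     .setQueryType( vk::QueryType::eTimestamp )
  //     .setQueryCount( 2 );
  //   vk::QueryPool queryPool = device.createQueryPool( queryPoolInfo );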
11942
11943 enum class ImageAspectFlagBits
11944 {
11945 eColor = VK_IMAGE_ASPECT_COLOR_BIT,
11946 eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
11947 eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
11948 eMetadata = VK_IMAGE_ASPECT_METADATA_BIT
11949 };
11950
11951 using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
11952
11953  VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
11954  {
11955 return ImageAspectFlags( bit0 ) | bit1;
11956 }
11957
11958  VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits )
11959 {
11960 return ~( ImageAspectFlags( bits ) );
11961 }
11962
11963 template <> struct FlagTraits<ImageAspectFlagBits>
11964 {
11965 enum
11966 {
11967 allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata)
11968 };
11969 };
11970
11971  struct ImageSubresource
11972 {
11973 ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t arrayLayer_ = 0 )
11974 : aspectMask( aspectMask_ )
11975 , mipLevel( mipLevel_ )
11976 , arrayLayer( arrayLayer_ )
11977 {
11978 }
11979
11980 ImageSubresource( VkImageSubresource const & rhs )
11981 {
11982 memcpy( this, &rhs, sizeof(ImageSubresource) );
11983 }
11984
11985 ImageSubresource& operator=( VkImageSubresource const & rhs )
11986 {
11987 memcpy( this, &rhs, sizeof(ImageSubresource) );
11988 return *this;
11989 }
11990
11991 ImageSubresource& setAspectMask( ImageAspectFlags aspectMask_ )
11992 {
11993 aspectMask = aspectMask_;
11994 return *this;
11995 }
11996
11997 ImageSubresource& setMipLevel( uint32_t mipLevel_ )
11998 {
11999 mipLevel = mipLevel_;
12000 return *this;
12001 }
12002
12003 ImageSubresource& setArrayLayer( uint32_t arrayLayer_ )
12004 {
12005 arrayLayer = arrayLayer_;
12006 return *this;
12007 }
12008
12009 operator const VkImageSubresource&() const
12010 {
12011 return *reinterpret_cast<const VkImageSubresource*>(this);
12012 }
12013
12014 bool operator==( ImageSubresource const& rhs ) const
12015 {
12016 return ( aspectMask == rhs.aspectMask )
12017 && ( mipLevel == rhs.mipLevel )
12018 && ( arrayLayer == rhs.arrayLayer );
12019 }
12020
12021 bool operator!=( ImageSubresource const& rhs ) const
12022 {
12023 return !operator==( rhs );
12024 }
12025
12026 ImageAspectFlags aspectMask;
12027 uint32_t mipLevel;
12028 uint32_t arrayLayer;
12029 };
12030 static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
12031
12032 struct ImageSubresourceLayers
12033 {
12034 ImageSubresourceLayers( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
12035 : aspectMask( aspectMask_ )
12036 , mipLevel( mipLevel_ )
12037 , baseArrayLayer( baseArrayLayer_ )
12038 , layerCount( layerCount_ )
12039 {
12040 }
12041
12042 ImageSubresourceLayers( VkImageSubresourceLayers const & rhs )
12043 {
12044 memcpy( this, &rhs, sizeof(ImageSubresourceLayers) );
12045 }
12046
12047 ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs )
12048 {
12049 memcpy( this, &rhs, sizeof(ImageSubresourceLayers) );
12050 return *this;
12051 }
12052
12053 ImageSubresourceLayers& setAspectMask( ImageAspectFlags aspectMask_ )
12054 {
12055 aspectMask = aspectMask_;
12056 return *this;
12057 }
12058
12059 ImageSubresourceLayers& setMipLevel( uint32_t mipLevel_ )
12060 {
12061 mipLevel = mipLevel_;
12062 return *this;
12063 }
12064
12065 ImageSubresourceLayers& setBaseArrayLayer( uint32_t baseArrayLayer_ )
12066 {
12067 baseArrayLayer = baseArrayLayer_;
12068 return *this;
12069 }
12070
12071 ImageSubresourceLayers& setLayerCount( uint32_t layerCount_ )
12072 {
12073 layerCount = layerCount_;
12074 return *this;
12075 }
12076
12077 operator const VkImageSubresourceLayers&() const
12078 {
12079 return *reinterpret_cast<const VkImageSubresourceLayers*>(this);
12080 }
12081
12082 bool operator==( ImageSubresourceLayers const& rhs ) const
12083 {
12084 return ( aspectMask == rhs.aspectMask )
12085 && ( mipLevel == rhs.mipLevel )
12086 && ( baseArrayLayer == rhs.baseArrayLayer )
12087 && ( layerCount == rhs.layerCount );
12088 }
12089
12090 bool operator!=( ImageSubresourceLayers const& rhs ) const
12091 {
12092 return !operator==( rhs );
12093 }
12094
12095 ImageAspectFlags aspectMask;
12096 uint32_t mipLevel;
12097 uint32_t baseArrayLayer;
12098 uint32_t layerCount;
12099 };
12100 static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
12101
12102 struct ImageSubresourceRange
12103 {
12104 ImageSubresourceRange( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t baseMipLevel_ = 0, uint32_t levelCount_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
12105 : aspectMask( aspectMask_ )
12106 , baseMipLevel( baseMipLevel_ )
12107 , levelCount( levelCount_ )
12108 , baseArrayLayer( baseArrayLayer_ )
12109 , layerCount( layerCount_ )
12110 {
12111 }
12112
12113 ImageSubresourceRange( VkImageSubresourceRange const & rhs )
12114 {
12115 memcpy( this, &rhs, sizeof(ImageSubresourceRange) );
12116 }
12117
12118 ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs )
12119 {
12120 memcpy( this, &rhs, sizeof(ImageSubresourceRange) );
12121 return *this;
12122 }
12123
12124 ImageSubresourceRange& setAspectMask( ImageAspectFlags aspectMask_ )
12125 {
12126 aspectMask = aspectMask_;
12127 return *this;
12128 }
12129
12130 ImageSubresourceRange& setBaseMipLevel( uint32_t baseMipLevel_ )
12131 {
12132 baseMipLevel = baseMipLevel_;
12133 return *this;
12134 }
12135
12136 ImageSubresourceRange& setLevelCount( uint32_t levelCount_ )
12137 {
12138 levelCount = levelCount_;
12139 return *this;
12140 }
12141
12142 ImageSubresourceRange& setBaseArrayLayer( uint32_t baseArrayLayer_ )
12143 {
12144 baseArrayLayer = baseArrayLayer_;
12145 return *this;
12146 }
12147
12148 ImageSubresourceRange& setLayerCount( uint32_t layerCount_ )
12149 {
12150 layerCount = layerCount_;
12151 return *this;
12152 }
12153
12154 operator const VkImageSubresourceRange&() const
12155 {
12156 return *reinterpret_cast<const VkImageSubresourceRange*>(this);
12157 }
12158
12159 bool operator==( ImageSubresourceRange const& rhs ) const
12160 {
12161 return ( aspectMask == rhs.aspectMask )
12162 && ( baseMipLevel == rhs.baseMipLevel )
12163 && ( levelCount == rhs.levelCount )
12164 && ( baseArrayLayer == rhs.baseArrayLayer )
12165 && ( layerCount == rhs.layerCount );
12166 }
12167
12168 bool operator!=( ImageSubresourceRange const& rhs ) const
12169 {
12170 return !operator==( rhs );
12171 }
12172
12173 ImageAspectFlags aspectMask;
12174 uint32_t baseMipLevel;
12175 uint32_t levelCount;
12176 uint32_t baseArrayLayer;
12177 uint32_t layerCount;
12178 };
12179 static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
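  // Usage sketch (illustrative comment only): a range covering the first mip level and
  // array layer of a color image, as commonly passed to image views and barriers.
  //
  //   vk::ImageSubresourceRange colorRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );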
12180
12181 struct ImageMemoryBarrier
12182 {
12183 ImageMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Image image_ = Image(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
12184 : sType( StructureType::eImageMemoryBarrier )
12185 , pNext( nullptr )
12186 , srcAccessMask( srcAccessMask_ )
12187 , dstAccessMask( dstAccessMask_ )
12188 , oldLayout( oldLayout_ )
12189 , newLayout( newLayout_ )
12190 , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
12191 , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
12192 , image( image_ )
12193 , subresourceRange( subresourceRange_ )
12194 {
12195 }
12196
12197 ImageMemoryBarrier( VkImageMemoryBarrier const & rhs )
12198 {
12199 memcpy( this, &rhs, sizeof(ImageMemoryBarrier) );
12200 }
12201
12202 ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs )
12203 {
12204 memcpy( this, &rhs, sizeof(ImageMemoryBarrier) );
12205 return *this;
12206 }
12207
12208 ImageMemoryBarrier& setSType( StructureType sType_ )
12209 {
12210 sType = sType_;
12211 return *this;
12212 }
12213
12214 ImageMemoryBarrier& setPNext( const void* pNext_ )
12215 {
12216 pNext = pNext_;
12217 return *this;
12218 }
12219
12220 ImageMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
12221 {
12222 srcAccessMask = srcAccessMask_;
12223 return *this;
12224 }
12225
12226 ImageMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
12227 {
12228 dstAccessMask = dstAccessMask_;
12229 return *this;
12230 }
12231
12232 ImageMemoryBarrier& setOldLayout( ImageLayout oldLayout_ )
12233 {
12234 oldLayout = oldLayout_;
12235 return *this;
12236 }
12237
12238 ImageMemoryBarrier& setNewLayout( ImageLayout newLayout_ )
12239 {
12240 newLayout = newLayout_;
12241 return *this;
12242 }
12243
12244 ImageMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
12245 {
12246 srcQueueFamilyIndex = srcQueueFamilyIndex_;
12247 return *this;
12248 }
12249
12250 ImageMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
12251 {
12252 dstQueueFamilyIndex = dstQueueFamilyIndex_;
12253 return *this;
12254 }
12255
12256 ImageMemoryBarrier& setImage( Image image_ )
12257 {
12258 image = image_;
12259 return *this;
12260 }
12261
12262 ImageMemoryBarrier& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
12263 {
12264 subresourceRange = subresourceRange_;
12265 return *this;
12266 }
12267
12268 operator const VkImageMemoryBarrier&() const
12269 {
12270 return *reinterpret_cast<const VkImageMemoryBarrier*>(this);
12271 }
12272
12273 bool operator==( ImageMemoryBarrier const& rhs ) const
12274 {
12275 return ( sType == rhs.sType )
12276 && ( pNext == rhs.pNext )
12277 && ( srcAccessMask == rhs.srcAccessMask )
12278 && ( dstAccessMask == rhs.dstAccessMask )
12279 && ( oldLayout == rhs.oldLayout )
12280 && ( newLayout == rhs.newLayout )
12281 && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
12282 && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
12283 && ( image == rhs.image )
12284 && ( subresourceRange == rhs.subresourceRange );
12285 }
12286
12287 bool operator!=( ImageMemoryBarrier const& rhs ) const
12288 {
12289 return !operator==( rhs );
12290 }
12291
12292 private:
12293 StructureType sType;
12294
12295 public:
12296 const void* pNext;
12297 AccessFlags srcAccessMask;
12298 AccessFlags dstAccessMask;
12299 ImageLayout oldLayout;
12300 ImageLayout newLayout;
12301 uint32_t srcQueueFamilyIndex;
12302 uint32_t dstQueueFamilyIndex;
12303 Image image;
12304 ImageSubresourceRange subresourceRange;
12305 };
12306 static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
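  // Usage sketch (illustrative comment only): transitioning an image from eUndefined to
  // eTransferDstOptimal before filling it; 'commandBuffer', 'image' and 'colorRange' are
  // assumed to exist (see the ImageSubresourceRange sketch above).
  //
  //   vk::ImageMemoryBarrier toTransferDst = vk::ImageMemoryBarrier()
  //     .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
  //     .setOldLayout( vk::ImageLayout::eUndefined )
  //     .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
  //     .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //     .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //     .setImage( image )
  //     .setSubresourceRange( colorRange );
  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
  //                                  vk::PipelineStageFlagBits::eTransfer,
  //                                  vk::DependencyFlags(), nullptr, nullptr, toTransferDst );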
12307
12308 struct ImageViewCreateInfo
12309 {
12310 ImageViewCreateInfo( ImageViewCreateFlags flags_ = ImageViewCreateFlags(), Image image_ = Image(), ImageViewType viewType_ = ImageViewType::e1D, Format format_ = Format::eUndefined, ComponentMapping components_ = ComponentMapping(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
12311 : sType( StructureType::eImageViewCreateInfo )
12312 , pNext( nullptr )
12313 , flags( flags_ )
12314 , image( image_ )
12315 , viewType( viewType_ )
12316 , format( format_ )
12317 , components( components_ )
12318 , subresourceRange( subresourceRange_ )
12319 {
12320 }
12321
12322 ImageViewCreateInfo( VkImageViewCreateInfo const & rhs )
12323 {
12324 memcpy( this, &rhs, sizeof(ImageViewCreateInfo) );
12325 }
12326
12327 ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs )
12328 {
12329 memcpy( this, &rhs, sizeof(ImageViewCreateInfo) );
12330 return *this;
12331 }
12332
12333 ImageViewCreateInfo& setSType( StructureType sType_ )
12334 {
12335 sType = sType_;
12336 return *this;
12337 }
12338
12339 ImageViewCreateInfo& setPNext( const void* pNext_ )
12340 {
12341 pNext = pNext_;
12342 return *this;
12343 }
12344
12345 ImageViewCreateInfo& setFlags( ImageViewCreateFlags flags_ )
12346 {
12347 flags = flags_;
12348 return *this;
12349 }
12350
12351 ImageViewCreateInfo& setImage( Image image_ )
12352 {
12353 image = image_;
12354 return *this;
12355 }
12356
12357 ImageViewCreateInfo& setViewType( ImageViewType viewType_ )
12358 {
12359 viewType = viewType_;
12360 return *this;
12361 }
12362
12363 ImageViewCreateInfo& setFormat( Format format_ )
12364 {
12365 format = format_;
12366 return *this;
12367 }
12368
12369 ImageViewCreateInfo& setComponents( ComponentMapping components_ )
12370 {
12371 components = components_;
12372 return *this;
12373 }
12374
12375 ImageViewCreateInfo& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
12376 {
12377 subresourceRange = subresourceRange_;
12378 return *this;
12379 }
12380
12381 operator const VkImageViewCreateInfo&() const
12382 {
12383 return *reinterpret_cast<const VkImageViewCreateInfo*>(this);
12384 }
12385
12386 bool operator==( ImageViewCreateInfo const& rhs ) const
12387 {
12388 return ( sType == rhs.sType )
12389 && ( pNext == rhs.pNext )
12390 && ( flags == rhs.flags )
12391 && ( image == rhs.image )
12392 && ( viewType == rhs.viewType )
12393 && ( format == rhs.format )
12394 && ( components == rhs.components )
12395 && ( subresourceRange == rhs.subresourceRange );
12396 }
12397
12398 bool operator!=( ImageViewCreateInfo const& rhs ) const
12399 {
12400 return !operator==( rhs );
12401 }
12402
12403 private:
12404 StructureType sType;
12405
12406 public:
12407 const void* pNext;
12408 ImageViewCreateFlags flags;
12409 Image image;
12410 ImageViewType viewType;
12411 Format format;
12412 ComponentMapping components;
12413 ImageSubresourceRange subresourceRange;
12414 };
12415 static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
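  // Usage sketch (illustrative comment only): creating a 2D color view of an existing
  // image; 'device' and 'image' are assumed to exist.
  //
  //   vk::ImageViewCreateInfo viewInfo = vk::ImageViewCreateInfo()
  //     .setImage( image )
  //     .setViewType( vk::ImageViewType::e2D )
  //     .setFormat( vk::Format::eR8G8B8A8Unorm )
  //     .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  //   vk::ImageView imageView = device.createImageView( viewInfo );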
12416
12417 struct ImageCopy
12418 {
12419 ImageCopy( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
12420 : srcSubresource( srcSubresource_ )
12421 , srcOffset( srcOffset_ )
12422 , dstSubresource( dstSubresource_ )
12423 , dstOffset( dstOffset_ )
12424 , extent( extent_ )
12425 {
12426 }
12427
12428 ImageCopy( VkImageCopy const & rhs )
12429 {
12430 memcpy( this, &rhs, sizeof(ImageCopy) );
12431 }
12432
12433 ImageCopy& operator=( VkImageCopy const & rhs )
12434 {
12435 memcpy( this, &rhs, sizeof(ImageCopy) );
12436 return *this;
12437 }
12438
12439 ImageCopy& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12440 {
12441 srcSubresource = srcSubresource_;
12442 return *this;
12443 }
12444
12445 ImageCopy& setSrcOffset( Offset3D srcOffset_ )
12446 {
12447 srcOffset = srcOffset_;
12448 return *this;
12449 }
12450
12451 ImageCopy& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12452 {
12453 dstSubresource = dstSubresource_;
12454 return *this;
12455 }
12456
12457 ImageCopy& setDstOffset( Offset3D dstOffset_ )
12458 {
12459 dstOffset = dstOffset_;
12460 return *this;
12461 }
12462
12463 ImageCopy& setExtent( Extent3D extent_ )
12464 {
12465 extent = extent_;
12466 return *this;
12467 }
12468
12469 operator const VkImageCopy&() const
12470 {
12471 return *reinterpret_cast<const VkImageCopy*>(this);
12472 }
12473
12474 bool operator==( ImageCopy const& rhs ) const
12475 {
12476 return ( srcSubresource == rhs.srcSubresource )
12477 && ( srcOffset == rhs.srcOffset )
12478 && ( dstSubresource == rhs.dstSubresource )
12479 && ( dstOffset == rhs.dstOffset )
12480 && ( extent == rhs.extent );
12481 }
12482
12483 bool operator!=( ImageCopy const& rhs ) const
12484 {
12485 return !operator==( rhs );
12486 }
12487
12488 ImageSubresourceLayers srcSubresource;
12489 Offset3D srcOffset;
12490 ImageSubresourceLayers dstSubresource;
12491 Offset3D dstOffset;
12492 Extent3D extent;
12493 };
12494 static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
12495
12496 struct ImageBlit
12497 {
12498 ImageBlit( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& srcOffsets_ = { { Offset3D(), Offset3D() } }, ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& dstOffsets_ = { { Offset3D(), Offset3D() } } )
12499 : srcSubresource( srcSubresource_ )
12500 , dstSubresource( dstSubresource_ )
12501 {
12502 memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
12503 memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
12504 }
12505
12506 ImageBlit( VkImageBlit const & rhs )
12507 {
12508 memcpy( this, &rhs, sizeof(ImageBlit) );
12509 }
12510
12511 ImageBlit& operator=( VkImageBlit const & rhs )
12512 {
12513 memcpy( this, &rhs, sizeof(ImageBlit) );
12514 return *this;
12515 }
12516
12517 ImageBlit& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12518 {
12519 srcSubresource = srcSubresource_;
12520 return *this;
12521 }
12522
12523 ImageBlit& setSrcOffsets( std::array<Offset3D,2> srcOffsets_ )
12524 {
12525 memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
12526 return *this;
12527 }
12528
12529 ImageBlit& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12530 {
12531 dstSubresource = dstSubresource_;
12532 return *this;
12533 }
12534
12535 ImageBlit& setDstOffsets( std::array<Offset3D,2> dstOffsets_ )
12536 {
12537 memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
12538 return *this;
12539 }
12540
12541 operator const VkImageBlit&() const
12542 {
12543 return *reinterpret_cast<const VkImageBlit*>(this);
12544 }
12545
12546 bool operator==( ImageBlit const& rhs ) const
12547 {
12548 return ( srcSubresource == rhs.srcSubresource )
12549 && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( Offset3D ) ) == 0 )
12550 && ( dstSubresource == rhs.dstSubresource )
12551 && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( Offset3D ) ) == 0 );
12552 }
12553
12554 bool operator!=( ImageBlit const& rhs ) const
12555 {
12556 return !operator==( rhs );
12557 }
12558
12559 ImageSubresourceLayers srcSubresource;
12560 Offset3D srcOffsets[2];
12561 ImageSubresourceLayers dstSubresource;
12562 Offset3D dstOffsets[2];
12563 };
12564 static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
12565
12566 struct BufferImageCopy
12567 {
12568 BufferImageCopy( DeviceSize bufferOffset_ = 0, uint32_t bufferRowLength_ = 0, uint32_t bufferImageHeight_ = 0, ImageSubresourceLayers imageSubresource_ = ImageSubresourceLayers(), Offset3D imageOffset_ = Offset3D(), Extent3D imageExtent_ = Extent3D() )
12569 : bufferOffset( bufferOffset_ )
12570 , bufferRowLength( bufferRowLength_ )
12571 , bufferImageHeight( bufferImageHeight_ )
12572 , imageSubresource( imageSubresource_ )
12573 , imageOffset( imageOffset_ )
12574 , imageExtent( imageExtent_ )
12575 {
12576 }
12577
12578 BufferImageCopy( VkBufferImageCopy const & rhs )
12579 {
12580 memcpy( this, &rhs, sizeof(BufferImageCopy) );
12581 }
12582
12583 BufferImageCopy& operator=( VkBufferImageCopy const & rhs )
12584 {
12585 memcpy( this, &rhs, sizeof(BufferImageCopy) );
12586 return *this;
12587 }
12588
12589 BufferImageCopy& setBufferOffset( DeviceSize bufferOffset_ )
12590 {
12591 bufferOffset = bufferOffset_;
12592 return *this;
12593 }
12594
12595 BufferImageCopy& setBufferRowLength( uint32_t bufferRowLength_ )
12596 {
12597 bufferRowLength = bufferRowLength_;
12598 return *this;
12599 }
12600
12601 BufferImageCopy& setBufferImageHeight( uint32_t bufferImageHeight_ )
12602 {
12603 bufferImageHeight = bufferImageHeight_;
12604 return *this;
12605 }
12606
12607 BufferImageCopy& setImageSubresource( ImageSubresourceLayers imageSubresource_ )
12608 {
12609 imageSubresource = imageSubresource_;
12610 return *this;
12611 }
12612
12613 BufferImageCopy& setImageOffset( Offset3D imageOffset_ )
12614 {
12615 imageOffset = imageOffset_;
12616 return *this;
12617 }
12618
12619 BufferImageCopy& setImageExtent( Extent3D imageExtent_ )
12620 {
12621 imageExtent = imageExtent_;
12622 return *this;
12623 }
12624
12625 operator const VkBufferImageCopy&() const
12626 {
12627 return *reinterpret_cast<const VkBufferImageCopy*>(this);
12628 }
12629
12630 bool operator==( BufferImageCopy const& rhs ) const
12631 {
12632 return ( bufferOffset == rhs.bufferOffset )
12633 && ( bufferRowLength == rhs.bufferRowLength )
12634 && ( bufferImageHeight == rhs.bufferImageHeight )
12635 && ( imageSubresource == rhs.imageSubresource )
12636 && ( imageOffset == rhs.imageOffset )
12637 && ( imageExtent == rhs.imageExtent );
12638 }
12639
12640 bool operator!=( BufferImageCopy const& rhs ) const
12641 {
12642 return !operator==( rhs );
12643 }
12644
12645 DeviceSize bufferOffset;
12646 uint32_t bufferRowLength;
12647 uint32_t bufferImageHeight;
12648 ImageSubresourceLayers imageSubresource;
12649 Offset3D imageOffset;
12650 Extent3D imageExtent;
12651 };
12652 static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
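  // Usage sketch (illustrative comment only): one region describing a tightly packed
  // upload from a staging buffer into mip level 0 of an image; 'commandBuffer',
  // 'stagingBuffer', 'image', 'width' and 'height' are assumed to exist, and the image
  // is assumed to already be in eTransferDstOptimal layout.
  //
  //   vk::BufferImageCopy region = vk::BufferImageCopy()
  //     .setImageSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
  //     .setImageExtent( vk::Extent3D( width, height, 1 ) );
  //   commandBuffer.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );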
12653
12654 struct ImageResolve
12655 {
12656 ImageResolve( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
12657 : srcSubresource( srcSubresource_ )
12658 , srcOffset( srcOffset_ )
12659 , dstSubresource( dstSubresource_ )
12660 , dstOffset( dstOffset_ )
12661 , extent( extent_ )
12662 {
12663 }
12664
12665 ImageResolve( VkImageResolve const & rhs )
12666 {
12667 memcpy( this, &rhs, sizeof(ImageResolve) );
12668 }
12669
12670 ImageResolve& operator=( VkImageResolve const & rhs )
12671 {
12672 memcpy( this, &rhs, sizeof(ImageResolve) );
12673 return *this;
12674 }
12675
12676 ImageResolve& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12677 {
12678 srcSubresource = srcSubresource_;
12679 return *this;
12680 }
12681
12682 ImageResolve& setSrcOffset( Offset3D srcOffset_ )
12683 {
12684 srcOffset = srcOffset_;
12685 return *this;
12686 }
12687
12688 ImageResolve& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12689 {
12690 dstSubresource = dstSubresource_;
12691 return *this;
12692 }
12693
12694 ImageResolve& setDstOffset( Offset3D dstOffset_ )
12695 {
12696 dstOffset = dstOffset_;
12697 return *this;
12698 }
12699
12700 ImageResolve& setExtent( Extent3D extent_ )
12701 {
12702 extent = extent_;
12703 return *this;
12704 }
12705
12706 operator const VkImageResolve&() const
12707 {
12708 return *reinterpret_cast<const VkImageResolve*>(this);
12709 }
12710
12711 bool operator==( ImageResolve const& rhs ) const
12712 {
12713 return ( srcSubresource == rhs.srcSubresource )
12714 && ( srcOffset == rhs.srcOffset )
12715 && ( dstSubresource == rhs.dstSubresource )
12716 && ( dstOffset == rhs.dstOffset )
12717 && ( extent == rhs.extent );
12718 }
12719
12720 bool operator!=( ImageResolve const& rhs ) const
12721 {
12722 return !operator==( rhs );
12723 }
12724
12725 ImageSubresourceLayers srcSubresource;
12726 Offset3D srcOffset;
12727 ImageSubresourceLayers dstSubresource;
12728 Offset3D dstOffset;
12729 Extent3D extent;
12730 };
12731 static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
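  // Usage sketch (illustrative only): describing a full-size resolve of a
  // multisampled color image into a single-sampled one, e.g. for use with
  // CommandBuffer::resolveImage. width and height are hypothetical application
  // values; the helper constructors are assumed from their definitions elsewhere
  // in this header.
  //
  //   vk::ImageResolve resolveRegion = vk::ImageResolve()
  //     .setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
  //     .setSrcOffset( vk::Offset3D( 0, 0, 0 ) )
  //     .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
  //     .setDstOffset( vk::Offset3D( 0, 0, 0 ) )
  //     .setExtent( vk::Extent3D( width, height, 1 ) );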
12732
12733 struct ClearAttachment
12734 {
12735 ClearAttachment( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t colorAttachment_ = 0, ClearValue clearValue_ = ClearValue() )
12736 : aspectMask( aspectMask_ )
12737 , colorAttachment( colorAttachment_ )
12738 , clearValue( clearValue_ )
12739 {
12740 }
12741
12742 ClearAttachment( VkClearAttachment const & rhs )
12743 {
12744 memcpy( this, &rhs, sizeof(ClearAttachment) );
12745 }
12746
12747 ClearAttachment& operator=( VkClearAttachment const & rhs )
12748 {
12749 memcpy( this, &rhs, sizeof(ClearAttachment) );
12750 return *this;
12751 }
12752
12753 ClearAttachment& setAspectMask( ImageAspectFlags aspectMask_ )
12754 {
12755 aspectMask = aspectMask_;
12756 return *this;
12757 }
12758
12759 ClearAttachment& setColorAttachment( uint32_t colorAttachment_ )
12760 {
12761 colorAttachment = colorAttachment_;
12762 return *this;
12763 }
12764
12765 ClearAttachment& setClearValue( ClearValue clearValue_ )
12766 {
12767 clearValue = clearValue_;
12768 return *this;
12769 }
12770
12771 operator const VkClearAttachment&() const
12772 {
12773 return *reinterpret_cast<const VkClearAttachment*>(this);
12774 }
12775
12776 ImageAspectFlags aspectMask;
12777 uint32_t colorAttachment;
12778 ClearValue clearValue;
12779 };
12780 static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
12781
12782 enum class SparseImageFormatFlagBits
12783 {
12784 eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
12785 eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
12786 eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
12787 };
12788
12789 using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
12790
12791 VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
12792 {
12793 return SparseImageFormatFlags( bit0 ) | bit1;
12794 }
12795
12796 VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
12797 {
12798 return ~( SparseImageFormatFlags( bits ) );
12799 }
12800
12801 template <> struct FlagTraits<SparseImageFormatFlagBits>
12802 {
12803 enum
12804 {
12805 allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
12806 };
12807 };
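  // Note (illustrative): as with the other Flags types in this header, individual
  // SparseImageFormatFlagBits values can be combined into a mask with the
  // operator| defined above, e.g.
  //
  //   vk::SparseImageFormatFlags f = vk::SparseImageFormatFlagBits::eSingleMiptail
  //                                | vk::SparseImageFormatFlagBits::eAlignedMipSize;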
12808
12809 struct SparseImageFormatProperties
12810 {
12811 operator const VkSparseImageFormatProperties&() const
12812 {
12813 return *reinterpret_cast<const VkSparseImageFormatProperties*>(this);
12814 }
12815
12816 bool operator==( SparseImageFormatProperties const& rhs ) const
12817 {
12818 return ( aspectMask == rhs.aspectMask )
12819 && ( imageGranularity == rhs.imageGranularity )
12820 && ( flags == rhs.flags );
12821 }
12822
12823 bool operator!=( SparseImageFormatProperties const& rhs ) const
12824 {
12825 return !operator==( rhs );
12826 }
12827
12828 ImageAspectFlags aspectMask;
12829 Extent3D imageGranularity;
12830 SparseImageFormatFlags flags;
12831 };
12832 static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
12833
12834 struct SparseImageMemoryRequirements
12835 {
12836 operator const VkSparseImageMemoryRequirements&() const
12837 {
12838 return *reinterpret_cast<const VkSparseImageMemoryRequirements*>(this);
12839 }
12840
12841 bool operator==( SparseImageMemoryRequirements const& rhs ) const
12842 {
12843 return ( formatProperties == rhs.formatProperties )
12844 && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
12845 && ( imageMipTailSize == rhs.imageMipTailSize )
12846 && ( imageMipTailOffset == rhs.imageMipTailOffset )
12847 && ( imageMipTailStride == rhs.imageMipTailStride );
12848 }
12849
12850 bool operator!=( SparseImageMemoryRequirements const& rhs ) const
12851 {
12852 return !operator==( rhs );
12853 }
12854
12855 SparseImageFormatProperties formatProperties;
12856 uint32_t imageMipTailFirstLod;
12857 DeviceSize imageMipTailSize;
12858 DeviceSize imageMipTailOffset;
12859 DeviceSize imageMipTailStride;
12860 };
12861 static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
12862
12863 struct SparseImageFormatProperties2KHR
12864 {
12865 operator const VkSparseImageFormatProperties2KHR&() const
12866 {
12867 return *reinterpret_cast<const VkSparseImageFormatProperties2KHR*>(this);
12868 }
12869
12870 bool operator==( SparseImageFormatProperties2KHR const& rhs ) const
12871 {
12872 return ( sType == rhs.sType )
12873 && ( pNext == rhs.pNext )
12874 && ( properties == rhs.properties );
12875 }
12876
12877 bool operator!=( SparseImageFormatProperties2KHR const& rhs ) const
12878 {
12879 return !operator==( rhs );
12880 }
12881
12882 private:
12883 StructureType sType;
12884
12885 public:
12886 void* pNext;
12887 SparseImageFormatProperties properties;
12888 };
12889 static_assert( sizeof( SparseImageFormatProperties2KHR ) == sizeof( VkSparseImageFormatProperties2KHR ), "struct and wrapper have different size!" );
12890
12891 enum class SparseMemoryBindFlagBits
12892 {
12893 eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
12894 };
12895
12896 using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
12897
12898 VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
12899 {
12900 return SparseMemoryBindFlags( bit0 ) | bit1;
12901 }
12902
12903 VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
12904 {
12905 return ~( SparseMemoryBindFlags( bits ) );
12906 }
12907
12908 template <> struct FlagTraits<SparseMemoryBindFlagBits>
12909 {
12910 enum
12911 {
12912 allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
12913 };
12914 };
12915
12916 struct SparseMemoryBind
12917 {
12918 SparseMemoryBind( DeviceSize resourceOffset_ = 0, DeviceSize size_ = 0, DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
12919 : resourceOffset( resourceOffset_ )
12920 , size( size_ )
12921 , memory( memory_ )
12922 , memoryOffset( memoryOffset_ )
12923 , flags( flags_ )
12924 {
12925 }
12926
12927 SparseMemoryBind( VkSparseMemoryBind const & rhs )
12928 {
12929 memcpy( this, &rhs, sizeof(SparseMemoryBind) );
12930 }
12931
12932 SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs )
12933 {
12934 memcpy( this, &rhs, sizeof(SparseMemoryBind) );
12935 return *this;
12936 }
12937
12938 SparseMemoryBind& setResourceOffset( DeviceSize resourceOffset_ )
12939 {
12940 resourceOffset = resourceOffset_;
12941 return *this;
12942 }
12943
12944 SparseMemoryBind& setSize( DeviceSize size_ )
12945 {
12946 size = size_;
12947 return *this;
12948 }
12949
12950 SparseMemoryBind& setMemory( DeviceMemory memory_ )
12951 {
12952 memory = memory_;
12953 return *this;
12954 }
12955
12956 SparseMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
12957 {
12958 memoryOffset = memoryOffset_;
12959 return *this;
12960 }
12961
12962 SparseMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
12963 {
12964 flags = flags_;
12965 return *this;
12966 }
12967
12968 operator const VkSparseMemoryBind&() const
12969 {
12970 return *reinterpret_cast<const VkSparseMemoryBind*>(this);
12971 }
12972
12973 bool operator==( SparseMemoryBind const& rhs ) const
12974 {
12975 return ( resourceOffset == rhs.resourceOffset )
12976 && ( size == rhs.size )
12977 && ( memory == rhs.memory )
12978 && ( memoryOffset == rhs.memoryOffset )
12979 && ( flags == rhs.flags );
12980 }
12981
12982 bool operator!=( SparseMemoryBind const& rhs ) const
12983 {
12984 return !operator==( rhs );
12985 }
12986
12987 DeviceSize resourceOffset;
12988 DeviceSize size;
12989 DeviceMemory memory;
12990 DeviceSize memoryOffset;
12991 SparseMemoryBindFlags flags;
12992 };
12993 static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
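  // Usage sketch (illustrative only): binding one whole memory allocation to the
  // start of a sparsely bound buffer's address range. deviceMemory and
  // allocationSize are hypothetical application values.
  //
  //   vk::SparseMemoryBind bind = vk::SparseMemoryBind()
  //     .setResourceOffset( 0 )
  //     .setSize( allocationSize )
  //     .setMemory( deviceMemory )
  //     .setMemoryOffset( 0 )
  //     .setFlags( vk::SparseMemoryBindFlags() );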
12994
12995 struct SparseImageMemoryBind
12996 {
12997 SparseImageMemoryBind( ImageSubresource subresource_ = ImageSubresource(), Offset3D offset_ = Offset3D(), Extent3D extent_ = Extent3D(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
12998 : subresource( subresource_ )
12999 , offset( offset_ )
13000 , extent( extent_ )
13001 , memory( memory_ )
13002 , memoryOffset( memoryOffset_ )
13003 , flags( flags_ )
13004 {
13005 }
13006
13007 SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs )
13008 {
13009 memcpy( this, &rhs, sizeof(SparseImageMemoryBind) );
13010 }
13011
13012 SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs )
13013 {
13014 memcpy( this, &rhs, sizeof(SparseImageMemoryBind) );
13015 return *this;
13016 }
13017
13018 SparseImageMemoryBind& setSubresource( ImageSubresource subresource_ )
13019 {
13020 subresource = subresource_;
13021 return *this;
13022 }
13023
13024 SparseImageMemoryBind& setOffset( Offset3D offset_ )
13025 {
13026 offset = offset_;
13027 return *this;
13028 }
13029
13030 SparseImageMemoryBind& setExtent( Extent3D extent_ )
13031 {
13032 extent = extent_;
13033 return *this;
13034 }
13035
13036 SparseImageMemoryBind& setMemory( DeviceMemory memory_ )
13037 {
13038 memory = memory_;
13039 return *this;
13040 }
13041
13042 SparseImageMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
13043 {
13044 memoryOffset = memoryOffset_;
13045 return *this;
13046 }
13047
13048 SparseImageMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
13049 {
13050 flags = flags_;
13051 return *this;
13052 }
13053
13054 operator const VkSparseImageMemoryBind&() const
13055 {
13056 return *reinterpret_cast<const VkSparseImageMemoryBind*>(this);
13057 }
13058
13059 bool operator==( SparseImageMemoryBind const& rhs ) const
13060 {
13061 return ( subresource == rhs.subresource )
13062 && ( offset == rhs.offset )
13063 && ( extent == rhs.extent )
13064 && ( memory == rhs.memory )
13065 && ( memoryOffset == rhs.memoryOffset )
13066 && ( flags == rhs.flags );
13067 }
13068
13069 bool operator!=( SparseImageMemoryBind const& rhs ) const
13070 {
13071 return !operator==( rhs );
13072 }
13073
13074 ImageSubresource subresource;
13075 Offset3D offset;
13076 Extent3D extent;
13077 DeviceMemory memory;
13078 DeviceSize memoryOffset;
13079 SparseMemoryBindFlags flags;
13080 };
13081 static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
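  // Usage sketch (illustrative only): binding device memory to a single
  // tile-aligned region of mip level 0 of a sparse image. tileExtent,
  // deviceMemory and memoryOffset are hypothetical application values; the
  // ImageSubresource constructor is assumed from its definition elsewhere in
  // this header.
  //
  //   vk::SparseImageMemoryBind imageBind = vk::SparseImageMemoryBind()
  //     .setSubresource( vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) )
  //     .setOffset( vk::Offset3D( 0, 0, 0 ) )
  //     .setExtent( tileExtent )
  //     .setMemory( deviceMemory )
  //     .setMemoryOffset( memoryOffset );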
13082
13083 struct SparseBufferMemoryBindInfo
13084 {
13085 SparseBufferMemoryBindInfo( Buffer buffer_ = Buffer(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
13086 : buffer( buffer_ )
13087 , bindCount( bindCount_ )
13088 , pBinds( pBinds_ )
13089 {
13090 }
13091
13092 SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs )
13093 {
13094 memcpy( this, &rhs, sizeof(SparseBufferMemoryBindInfo) );
13095 }
13096
13097 SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs )
13098 {
13099 memcpy( this, &rhs, sizeof(SparseBufferMemoryBindInfo) );
13100 return *this;
13101 }
13102
13103 SparseBufferMemoryBindInfo& setBuffer( Buffer buffer_ )
13104 {
13105 buffer = buffer_;
13106 return *this;
13107 }
13108
13109 SparseBufferMemoryBindInfo& setBindCount( uint32_t bindCount_ )
13110 {
13111 bindCount = bindCount_;
13112 return *this;
13113 }
13114
13115 SparseBufferMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
13116 {
13117 pBinds = pBinds_;
13118 return *this;
13119 }
13120
13121 operator const VkSparseBufferMemoryBindInfo&() const
13122 {
13123 return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>(this);
13124 }
13125
13126 bool operator==( SparseBufferMemoryBindInfo const& rhs ) const
13127 {
13128 return ( buffer == rhs.buffer )
13129 && ( bindCount == rhs.bindCount )
13130 && ( pBinds == rhs.pBinds );
13131 }
13132
13133 bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const
13134 {
13135 return !operator==( rhs );
13136 }
13137
13138 Buffer buffer;
13139 uint32_t bindCount;
13140 const SparseMemoryBind* pBinds;
13141 };
13142 static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
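  // Usage sketch (illustrative only): pointing a SparseBufferMemoryBindInfo at a
  // single SparseMemoryBind; sparseBuffer and bind are hypothetical application
  // objects. This is the element type consumed through BindSparseInfo::pBufferBinds.
  //
  //   vk::SparseBufferMemoryBindInfo bufferBindInfo( sparseBuffer, 1, &bind );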
13143
13144 struct SparseImageOpaqueMemoryBindInfo
13145 {
13146 SparseImageOpaqueMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
13147 : image( image_ )
13148 , bindCount( bindCount_ )
13149 , pBinds( pBinds_ )
13150 {
13151 }
13152
13153 SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs )
13154 {
13155 memcpy( this, &rhs, sizeof(SparseImageOpaqueMemoryBindInfo) );
13156 }
13157
13158 SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs )
13159 {
13160 memcpy( this, &rhs, sizeof(SparseImageOpaqueMemoryBindInfo) );
13161 return *this;
13162 }
13163
13164 SparseImageOpaqueMemoryBindInfo& setImage( Image image_ )
13165 {
13166 image = image_;
13167 return *this;
13168 }
13169
13170 SparseImageOpaqueMemoryBindInfo& setBindCount( uint32_t bindCount_ )
13171 {
13172 bindCount = bindCount_;
13173 return *this;
13174 }
13175
13176 SparseImageOpaqueMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
13177 {
13178 pBinds = pBinds_;
13179 return *this;
13180 }
13181
13182 operator const VkSparseImageOpaqueMemoryBindInfo&() const
13183 {
13184 return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>(this);
13185 }
13186
13187 bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const
13188 {
13189 return ( image == rhs.image )
13190 && ( bindCount == rhs.bindCount )
13191 && ( pBinds == rhs.pBinds );
13192 }
13193
13194 bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const
13195 {
13196 return !operator==( rhs );
13197 }
13198
13199 Image image;
13200 uint32_t bindCount;
13201 const SparseMemoryBind* pBinds;
13202 };
13203 static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
13204
13205 struct SparseImageMemoryBindInfo
13206 {
13207 SparseImageMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseImageMemoryBind* pBinds_ = nullptr )
13208 : image( image_ )
13209 , bindCount( bindCount_ )
13210 , pBinds( pBinds_ )
13211 {
13212 }
13213
13214 SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs )
13215 {
13216 memcpy( this, &rhs, sizeof(SparseImageMemoryBindInfo) );
13217 }
13218
13219 SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs )
13220 {
13221 memcpy( this, &rhs, sizeof(SparseImageMemoryBindInfo) );
13222 return *this;
13223 }
13224
13225 SparseImageMemoryBindInfo& setImage( Image image_ )
13226 {
13227 image = image_;
13228 return *this;
13229 }
13230
13231 SparseImageMemoryBindInfo& setBindCount( uint32_t bindCount_ )
13232 {
13233 bindCount = bindCount_;
13234 return *this;
13235 }
13236
13237 SparseImageMemoryBindInfo& setPBinds( const SparseImageMemoryBind* pBinds_ )
13238 {
13239 pBinds = pBinds_;
13240 return *this;
13241 }
13242
13243 operator const VkSparseImageMemoryBindInfo&() const
13244 {
13245 return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>(this);
13246 }
13247
13248 bool operator==( SparseImageMemoryBindInfo const& rhs ) const
13249 {
13250 return ( image == rhs.image )
13251 && ( bindCount == rhs.bindCount )
13252 && ( pBinds == rhs.pBinds );
13253 }
13254
13255 bool operator!=( SparseImageMemoryBindInfo const& rhs ) const
13256 {
13257 return !operator==( rhs );
13258 }
13259
13260 Image image;
13261 uint32_t bindCount;
13262 const SparseImageMemoryBind* pBinds;
13263 };
13264 static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
13265
13266 struct BindSparseInfo
13267 {
13268 BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t bufferBindCount_ = 0, const SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, uint32_t imageOpaqueBindCount_ = 0, const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, uint32_t imageBindCount_ = 0, const SparseImageMemoryBindInfo* pImageBinds_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
13269 : sType( StructureType::eBindSparseInfo )
13270 , pNext( nullptr )
13271 , waitSemaphoreCount( waitSemaphoreCount_ )
13272 , pWaitSemaphores( pWaitSemaphores_ )
13273 , bufferBindCount( bufferBindCount_ )
13274 , pBufferBinds( pBufferBinds_ )
13275 , imageOpaqueBindCount( imageOpaqueBindCount_ )
13276 , pImageOpaqueBinds( pImageOpaqueBinds_ )
13277 , imageBindCount( imageBindCount_ )
13278 , pImageBinds( pImageBinds_ )
13279 , signalSemaphoreCount( signalSemaphoreCount_ )
13280 , pSignalSemaphores( pSignalSemaphores_ )
13281 {
13282 }
13283
13284 BindSparseInfo( VkBindSparseInfo const & rhs )
13285 {
13286 memcpy( this, &rhs, sizeof(BindSparseInfo) );
13287 }
13288
13289 BindSparseInfo& operator=( VkBindSparseInfo const & rhs )
13290 {
13291 memcpy( this, &rhs, sizeof(BindSparseInfo) );
13292 return *this;
13293 }
13294
13295 BindSparseInfo& setSType( StructureType sType_ )
13296 {
13297 sType = sType_;
13298 return *this;
13299 }
13300
13301 BindSparseInfo& setPNext( const void* pNext_ )
13302 {
13303 pNext = pNext_;
13304 return *this;
13305 }
13306
13307 BindSparseInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
13308 {
13309 waitSemaphoreCount = waitSemaphoreCount_;
13310 return *this;
13311 }
13312
13313 BindSparseInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
13314 {
13315 pWaitSemaphores = pWaitSemaphores_;
13316 return *this;
13317 }
13318
13319 BindSparseInfo& setBufferBindCount( uint32_t bufferBindCount_ )
13320 {
13321 bufferBindCount = bufferBindCount_;
13322 return *this;
13323 }
13324
13325 BindSparseInfo& setPBufferBinds( const SparseBufferMemoryBindInfo* pBufferBinds_ )
13326 {
13327 pBufferBinds = pBufferBinds_;
13328 return *this;
13329 }
13330
13331 BindSparseInfo& setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ )
13332 {
13333 imageOpaqueBindCount = imageOpaqueBindCount_;
13334 return *this;
13335 }
13336
13337 BindSparseInfo& setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ )
13338 {
13339 pImageOpaqueBinds = pImageOpaqueBinds_;
13340 return *this;
13341 }
13342
13343 BindSparseInfo& setImageBindCount( uint32_t imageBindCount_ )
13344 {
13345 imageBindCount = imageBindCount_;
13346 return *this;
13347 }
13348
13349 BindSparseInfo& setPImageBinds( const SparseImageMemoryBindInfo* pImageBinds_ )
13350 {
13351 pImageBinds = pImageBinds_;
13352 return *this;
13353 }
13354
13355 BindSparseInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
13356 {
13357 signalSemaphoreCount = signalSemaphoreCount_;
13358 return *this;
13359 }
13360
13361 BindSparseInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
13362 {
13363 pSignalSemaphores = pSignalSemaphores_;
13364 return *this;
13365 }
13366
13367 operator const VkBindSparseInfo&() const
13368 {
13369 return *reinterpret_cast<const VkBindSparseInfo*>(this);
13370 }
13371
13372 bool operator==( BindSparseInfo const& rhs ) const
13373 {
13374 return ( sType == rhs.sType )
13375 && ( pNext == rhs.pNext )
13376 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
13377 && ( pWaitSemaphores == rhs.pWaitSemaphores )
13378 && ( bufferBindCount == rhs.bufferBindCount )
13379 && ( pBufferBinds == rhs.pBufferBinds )
13380 && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
13381 && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
13382 && ( imageBindCount == rhs.imageBindCount )
13383 && ( pImageBinds == rhs.pImageBinds )
13384 && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
13385 && ( pSignalSemaphores == rhs.pSignalSemaphores );
13386 }
13387
13388 bool operator!=( BindSparseInfo const& rhs ) const
13389 {
13390 return !operator==( rhs );
13391 }
13392
13393 private:
13394 StructureType sType;
13395
13396 public:
13397 const void* pNext;
13398 uint32_t waitSemaphoreCount;
13399 const Semaphore* pWaitSemaphores;
13400 uint32_t bufferBindCount;
13401 const SparseBufferMemoryBindInfo* pBufferBinds;
13402 uint32_t imageOpaqueBindCount;
13403 const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
13404 uint32_t imageBindCount;
13405 const SparseImageMemoryBindInfo* pImageBinds;
13406 uint32_t signalSemaphoreCount;
13407 const Semaphore* pSignalSemaphores;
13408 };
13409 static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
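  // Usage sketch (illustrative only): wiring one SparseBufferMemoryBindInfo into a
  // BindSparseInfo and submitting it on a sparse-binding-capable queue. The queue,
  // bufferBindInfo and fence variables are hypothetical; Queue::bindSparse is
  // assumed from its declaration elsewhere in this header.
  //
  //   vk::BindSparseInfo bindSparseInfo = vk::BindSparseInfo()
  //     .setBufferBindCount( 1 )
  //     .setPBufferBinds( &bufferBindInfo );
  //   queue.bindSparse( 1, &bindSparseInfo, fence );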
13410
13411 enum class PipelineStageFlagBits
13412 {
13413 eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
13414 eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
13415 eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
13416 eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
13417 eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
13418 eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
13419 eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
13420 eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
13421 eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
13422 eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
13423 eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
13424 eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
13425 eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
13426 eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
13427 eHost = VK_PIPELINE_STAGE_HOST_BIT,
13428 eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
13429 eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
13430 eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX
13431 };
13432
13433 using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
13434
13435 VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
13436 {
13437 return PipelineStageFlags( bit0 ) | bit1;
13438 }
13439
13440 VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits )
13441 {
13442 return ~( PipelineStageFlags( bits ) );
13443 }
13444
13445 template <> struct FlagTraits<PipelineStageFlagBits>
13446 {
13447 enum
13448 {
13449 allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX)
13450 };
13451 };
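  // Note (illustrative): pipeline stage bits are typically combined into a
  // PipelineStageFlags mask for barriers and queue submissions using the
  // operator| defined above, e.g.
  //
  //   vk::PipelineStageFlags waitStages = vk::PipelineStageFlagBits::eColorAttachmentOutput
  //                                     | vk::PipelineStageFlagBits::eEarlyFragmentTests;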
13452
13453 enum class CommandPoolCreateFlagBits
13454 {
13455 eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
13456 eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
13457 };
13458
13459 using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
13460
13461 VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
13462 {
13463 return CommandPoolCreateFlags( bit0 ) | bit1;
13464 }
13465
13466 VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
13467 {
13468 return ~( CommandPoolCreateFlags( bits ) );
13469 }
13470
13471 template <> struct FlagTraits<CommandPoolCreateFlagBits>
13472 {
13473 enum
13474 {
13475 allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer)
13476 };
13477 };
13478
13479 struct CommandPoolCreateInfo
13480 {
13481 CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), uint32_t queueFamilyIndex_ = 0 )
13482 : sType( StructureType::eCommandPoolCreateInfo )
13483 , pNext( nullptr )
13484 , flags( flags_ )
13485 , queueFamilyIndex( queueFamilyIndex_ )
13486 {
13487 }
13488
13489 CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs )
13490 {
13491 memcpy( this, &rhs, sizeof(CommandPoolCreateInfo) );
13492 }
13493
13494 CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs )
13495 {
13496 memcpy( this, &rhs, sizeof(CommandPoolCreateInfo) );
13497 return *this;
13498 }
13499
13500 CommandPoolCreateInfo& setSType( StructureType sType_ )
13501 {
13502 sType = sType_;
13503 return *this;
13504 }
13505
13506 CommandPoolCreateInfo& setPNext( const void* pNext_ )
13507 {
13508 pNext = pNext_;
13509 return *this;
13510 }
13511
13512 CommandPoolCreateInfo& setFlags( CommandPoolCreateFlags flags_ )
13513 {
13514 flags = flags_;
13515 return *this;
13516 }
13517
13518 CommandPoolCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
13519 {
13520 queueFamilyIndex = queueFamilyIndex_;
13521 return *this;
13522 }
13523
13524 operator const VkCommandPoolCreateInfo&() const
13525 {
13526 return *reinterpret_cast<const VkCommandPoolCreateInfo*>(this);
13527 }
13528
13529 bool operator==( CommandPoolCreateInfo const& rhs ) const
13530 {
13531 return ( sType == rhs.sType )
13532 && ( pNext == rhs.pNext )
13533 && ( flags == rhs.flags )
13534 && ( queueFamilyIndex == rhs.queueFamilyIndex );
13535 }
13536
13537 bool operator!=( CommandPoolCreateInfo const& rhs ) const
13538 {
13539 return !operator==( rhs );
13540 }
13541
13542 private:
13543 StructureType sType;
13544
13545 public:
13546 const void* pNext;
13547 CommandPoolCreateFlags flags;
13548 uint32_t queueFamilyIndex;
13549 };
13550 static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
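  // Usage sketch (illustrative only): creating a resettable command pool for a
  // given queue family. device and graphicsQueueFamilyIndex are hypothetical
  // application objects; Device::createCommandPool is assumed from its
  // declaration elsewhere in this header (enhanced mode shown, which throws on
  // failure).
  //
  //   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
  //                                       graphicsQueueFamilyIndex );
  //   vk::CommandPool commandPool = device.createCommandPool( poolInfo );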
13551
13552 enum class CommandPoolResetFlagBits
13553 {
13554 eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
13555 };
13556
13557 using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
13558
13559 VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
13560 {
13561 return CommandPoolResetFlags( bit0 ) | bit1;
13562 }
13563
13564 VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
13565 {
13566 return ~( CommandPoolResetFlags( bits ) );
13567 }
13568
13569 template <> struct FlagTraits<CommandPoolResetFlagBits>
13570 {
13571 enum
13572 {
13573 allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
13574 };
13575 };
13576
13577 enum class CommandBufferResetFlagBits
13578 {
13579 eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
13580 };
13581
13582 using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
13583
13584 VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
13585 {
13586 return CommandBufferResetFlags( bit0 ) | bit1;
13587 }
13588
13589 VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
13590 {
13591 return ~( CommandBufferResetFlags( bits ) );
13592 }
13593
13594 template <> struct FlagTraits<CommandBufferResetFlagBits>
13595 {
13596 enum
13597 {
13598 allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
13599 };
13600 };
13601
13602 enum class SampleCountFlagBits
13603 {
13604 e1 = VK_SAMPLE_COUNT_1_BIT,
13605 e2 = VK_SAMPLE_COUNT_2_BIT,
13606 e4 = VK_SAMPLE_COUNT_4_BIT,
13607 e8 = VK_SAMPLE_COUNT_8_BIT,
13608 e16 = VK_SAMPLE_COUNT_16_BIT,
13609 e32 = VK_SAMPLE_COUNT_32_BIT,
13610 e64 = VK_SAMPLE_COUNT_64_BIT
13611 };
13612
13613 using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
13614
13615 VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
13616 {
13617 return SampleCountFlags( bit0 ) | bit1;
13618 }
13619
13620 VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits )
13621 {
13622 return ~( SampleCountFlags( bits ) );
13623 }
13624
13625 template <> struct FlagTraits<SampleCountFlagBits>
13626 {
13627 enum
13628 {
13629 allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
13630 };
13631 };
13632
13633 struct ImageFormatProperties
13634 {
13635 operator const VkImageFormatProperties&() const
13636 {
13637 return *reinterpret_cast<const VkImageFormatProperties*>(this);
13638 }
13639
13640 bool operator==( ImageFormatProperties const& rhs ) const
13641 {
13642 return ( maxExtent == rhs.maxExtent )
13643 && ( maxMipLevels == rhs.maxMipLevels )
13644 && ( maxArrayLayers == rhs.maxArrayLayers )
13645 && ( sampleCounts == rhs.sampleCounts )
13646 && ( maxResourceSize == rhs.maxResourceSize );
13647 }
13648
13649 bool operator!=( ImageFormatProperties const& rhs ) const
13650 {
13651 return !operator==( rhs );
13652 }
13653
13654 Extent3D maxExtent;
13655 uint32_t maxMipLevels;
13656 uint32_t maxArrayLayers;
13657 SampleCountFlags sampleCounts;
13658 DeviceSize maxResourceSize;
13659 };
13660 static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
13661
13662 struct ImageCreateInfo
13663 {
13664 ImageCreateInfo( ImageCreateFlags flags_ = ImageCreateFlags(), ImageType imageType_ = ImageType::e1D, Format format_ = Format::eUndefined, Extent3D extent_ = Extent3D(), uint32_t mipLevels_ = 0, uint32_t arrayLayers_ = 0, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, ImageLayout initialLayout_ = ImageLayout::eUndefined )
13665 : sType( StructureType::eImageCreateInfo )
13666 , pNext( nullptr )
13667 , flags( flags_ )
13668 , imageType( imageType_ )
13669 , format( format_ )
13670 , extent( extent_ )
13671 , mipLevels( mipLevels_ )
13672 , arrayLayers( arrayLayers_ )
13673 , samples( samples_ )
13674 , tiling( tiling_ )
13675 , usage( usage_ )
13676 , sharingMode( sharingMode_ )
13677 , queueFamilyIndexCount( queueFamilyIndexCount_ )
13678 , pQueueFamilyIndices( pQueueFamilyIndices_ )
13679 , initialLayout( initialLayout_ )
13680 {
13681 }
13682
13683 ImageCreateInfo( VkImageCreateInfo const & rhs )
13684 {
13685 memcpy( this, &rhs, sizeof(ImageCreateInfo) );
13686 }
13687
13688 ImageCreateInfo& operator=( VkImageCreateInfo const & rhs )
13689 {
13690 memcpy( this, &rhs, sizeof(ImageCreateInfo) );
13691 return *this;
13692 }
13693
13694 ImageCreateInfo& setSType( StructureType sType_ )
13695 {
13696 sType = sType_;
13697 return *this;
13698 }
13699
13700 ImageCreateInfo& setPNext( const void* pNext_ )
13701 {
13702 pNext = pNext_;
13703 return *this;
13704 }
13705
13706 ImageCreateInfo& setFlags( ImageCreateFlags flags_ )
13707 {
13708 flags = flags_;
13709 return *this;
13710 }
13711
13712 ImageCreateInfo& setImageType( ImageType imageType_ )
13713 {
13714 imageType = imageType_;
13715 return *this;
13716 }
13717
13718 ImageCreateInfo& setFormat( Format format_ )
13719 {
13720 format = format_;
13721 return *this;
13722 }
13723
13724 ImageCreateInfo& setExtent( Extent3D extent_ )
13725 {
13726 extent = extent_;
13727 return *this;
13728 }
13729
13730 ImageCreateInfo& setMipLevels( uint32_t mipLevels_ )
13731 {
13732 mipLevels = mipLevels_;
13733 return *this;
13734 }
13735
13736 ImageCreateInfo& setArrayLayers( uint32_t arrayLayers_ )
13737 {
13738 arrayLayers = arrayLayers_;
13739 return *this;
13740 }
13741
13742 ImageCreateInfo& setSamples( SampleCountFlagBits samples_ )
13743 {
13744 samples = samples_;
13745 return *this;
13746 }
13747
13748 ImageCreateInfo& setTiling( ImageTiling tiling_ )
13749 {
13750 tiling = tiling_;
13751 return *this;
13752 }
13753
13754 ImageCreateInfo& setUsage( ImageUsageFlags usage_ )
13755 {
13756 usage = usage_;
13757 return *this;
13758 }
13759
13760 ImageCreateInfo& setSharingMode( SharingMode sharingMode_ )
13761 {
13762 sharingMode = sharingMode_;
13763 return *this;
13764 }
13765
13766 ImageCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
13767 {
13768 queueFamilyIndexCount = queueFamilyIndexCount_;
13769 return *this;
13770 }
13771
13772 ImageCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
13773 {
13774 pQueueFamilyIndices = pQueueFamilyIndices_;
13775 return *this;
13776 }
13777
13778 ImageCreateInfo& setInitialLayout( ImageLayout initialLayout_ )
13779 {
13780 initialLayout = initialLayout_;
13781 return *this;
13782 }
13783
13784 operator const VkImageCreateInfo&() const
13785 {
13786 return *reinterpret_cast<const VkImageCreateInfo*>(this);
13787 }
13788
13789 bool operator==( ImageCreateInfo const& rhs ) const
13790 {
13791 return ( sType == rhs.sType )
13792 && ( pNext == rhs.pNext )
13793 && ( flags == rhs.flags )
13794 && ( imageType == rhs.imageType )
13795 && ( format == rhs.format )
13796 && ( extent == rhs.extent )
13797 && ( mipLevels == rhs.mipLevels )
13798 && ( arrayLayers == rhs.arrayLayers )
13799 && ( samples == rhs.samples )
13800 && ( tiling == rhs.tiling )
13801 && ( usage == rhs.usage )
13802 && ( sharingMode == rhs.sharingMode )
13803 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
13804 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
13805 && ( initialLayout == rhs.initialLayout );
13806 }
13807
13808 bool operator!=( ImageCreateInfo const& rhs ) const
13809 {
13810 return !operator==( rhs );
13811 }
13812
13813 private:
13814 StructureType sType;
13815
13816 public:
13817 const void* pNext;
13818 ImageCreateFlags flags;
13819 ImageType imageType;
13820 Format format;
13821 Extent3D extent;
13822 uint32_t mipLevels;
13823 uint32_t arrayLayers;
13824 SampleCountFlagBits samples;
13825 ImageTiling tiling;
13826 ImageUsageFlags usage;
13827 SharingMode sharingMode;
13828 uint32_t queueFamilyIndexCount;
13829 const uint32_t* pQueueFamilyIndices;
13830 ImageLayout initialLayout;
13831 };
13832 static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
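  // Usage sketch (illustrative only): filling an ImageCreateInfo for a simple 2D
  // sampled texture that also receives transfer writes. width and height are
  // hypothetical application values and the format is just an example; the
  // ImageUsageFlagBits values and Extent3D constructor are assumed from their
  // definitions elsewhere in this header.
  //
  //   vk::ImageCreateInfo imageInfo = vk::ImageCreateInfo()
  //     .setImageType( vk::ImageType::e2D )
  //     .setFormat( vk::Format::eR8G8B8A8Unorm )
  //     .setExtent( vk::Extent3D( width, height, 1 ) )
  //     .setMipLevels( 1 )
  //     .setArrayLayers( 1 )
  //     .setSamples( vk::SampleCountFlagBits::e1 )
  //     .setTiling( vk::ImageTiling::eOptimal )
  //     .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst )
  //     .setSharingMode( vk::SharingMode::eExclusive )
  //     .setInitialLayout( vk::ImageLayout::eUndefined );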
13833
13834 struct PipelineMultisampleStateCreateInfo
13835 {
13836 PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = PipelineMultisampleStateCreateFlags(), SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, Bool32 sampleShadingEnable_ = 0, float minSampleShading_ = 0, const SampleMask* pSampleMask_ = nullptr, Bool32 alphaToCoverageEnable_ = 0, Bool32 alphaToOneEnable_ = 0 )
13837 : sType( StructureType::ePipelineMultisampleStateCreateInfo )
13838 , pNext( nullptr )
13839 , flags( flags_ )
13840 , rasterizationSamples( rasterizationSamples_ )
13841 , sampleShadingEnable( sampleShadingEnable_ )
13842 , minSampleShading( minSampleShading_ )
13843 , pSampleMask( pSampleMask_ )
13844 , alphaToCoverageEnable( alphaToCoverageEnable_ )
13845 , alphaToOneEnable( alphaToOneEnable_ )
13846 {
13847 }
13848
13849 PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs )
13850 {
13851 memcpy( this, &rhs, sizeof(PipelineMultisampleStateCreateInfo) );
13852 }
13853
13854 PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs )
13855 {
13856 memcpy( this, &rhs, sizeof(PipelineMultisampleStateCreateInfo) );
13857 return *this;
13858 }
13859
13860 PipelineMultisampleStateCreateInfo& setSType( StructureType sType_ )
13861 {
13862 sType = sType_;
13863 return *this;
13864 }
13865
13866 PipelineMultisampleStateCreateInfo& setPNext( const void* pNext_ )
13867 {
13868 pNext = pNext_;
13869 return *this;
13870 }
13871
13872 PipelineMultisampleStateCreateInfo& setFlags( PipelineMultisampleStateCreateFlags flags_ )
13873 {
13874 flags = flags_;
13875 return *this;
13876 }
13877
13878 PipelineMultisampleStateCreateInfo& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ )
13879 {
13880 rasterizationSamples = rasterizationSamples_;
13881 return *this;
13882 }
13883
13884 PipelineMultisampleStateCreateInfo& setSampleShadingEnable( Bool32 sampleShadingEnable_ )
13885 {
13886 sampleShadingEnable = sampleShadingEnable_;
13887 return *this;
13888 }
13889
13890 PipelineMultisampleStateCreateInfo& setMinSampleShading( float minSampleShading_ )
13891 {
13892 minSampleShading = minSampleShading_;
13893 return *this;
13894 }
13895
13896 PipelineMultisampleStateCreateInfo& setPSampleMask( const SampleMask* pSampleMask_ )
13897 {
13898 pSampleMask = pSampleMask_;
13899 return *this;
13900 }
13901
13902 PipelineMultisampleStateCreateInfo& setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ )
13903 {
13904 alphaToCoverageEnable = alphaToCoverageEnable_;
13905 return *this;
13906 }
13907
13908 PipelineMultisampleStateCreateInfo& setAlphaToOneEnable( Bool32 alphaToOneEnable_ )
13909 {
13910 alphaToOneEnable = alphaToOneEnable_;
13911 return *this;
13912 }
13913
13914 operator const VkPipelineMultisampleStateCreateInfo&() const
13915 {
13916 return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>(this);
13917 }
13918
13919 bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const
13920 {
13921 return ( sType == rhs.sType )
13922 && ( pNext == rhs.pNext )
13923 && ( flags == rhs.flags )
13924 && ( rasterizationSamples == rhs.rasterizationSamples )
13925 && ( sampleShadingEnable == rhs.sampleShadingEnable )
13926 && ( minSampleShading == rhs.minSampleShading )
13927 && ( pSampleMask == rhs.pSampleMask )
13928 && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
13929 && ( alphaToOneEnable == rhs.alphaToOneEnable );
13930 }
13931
13932 bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const
13933 {
13934 return !operator==( rhs );
13935 }
13936
13937 private:
13938 StructureType sType;
13939
13940 public:
13941 const void* pNext;
13942 PipelineMultisampleStateCreateFlags flags;
13943 SampleCountFlagBits rasterizationSamples;
13944 Bool32 sampleShadingEnable;
13945 float minSampleShading;
13946 const SampleMask* pSampleMask;
13947 Bool32 alphaToCoverageEnable;
13948 Bool32 alphaToOneEnable;
13949 };
13950 static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
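  // Usage sketch (illustrative only): a multisample state describing plain
  // single-sampled rasterization with sample shading disabled, the common case
  // when multisampling is not used.
  //
  //   vk::PipelineMultisampleStateCreateInfo multisampleState = vk::PipelineMultisampleStateCreateInfo()
  //     .setRasterizationSamples( vk::SampleCountFlagBits::e1 )
  //     .setSampleShadingEnable( VK_FALSE );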
13951
13952 struct GraphicsPipelineCreateInfo
13953 {
13954 GraphicsPipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), uint32_t stageCount_ = 0, const PipelineShaderStageCreateInfo* pStages_ = nullptr, const PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr, const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr, const PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr, const PipelineViewportStateCreateInfo* pViewportState_ = nullptr, const PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr, const PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr, const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr, const PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr, const PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr, PipelineLayout layout_ = PipelineLayout(), RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
13955 : sType( StructureType::eGraphicsPipelineCreateInfo )
13956 , pNext( nullptr )
13957 , flags( flags_ )
13958 , stageCount( stageCount_ )
13959 , pStages( pStages_ )
13960 , pVertexInputState( pVertexInputState_ )
13961 , pInputAssemblyState( pInputAssemblyState_ )
13962 , pTessellationState( pTessellationState_ )
13963 , pViewportState( pViewportState_ )
13964 , pRasterizationState( pRasterizationState_ )
13965 , pMultisampleState( pMultisampleState_ )
13966 , pDepthStencilState( pDepthStencilState_ )
13967 , pColorBlendState( pColorBlendState_ )
13968 , pDynamicState( pDynamicState_ )
13969 , layout( layout_ )
13970 , renderPass( renderPass_ )
13971 , subpass( subpass_ )
13972 , basePipelineHandle( basePipelineHandle_ )
13973 , basePipelineIndex( basePipelineIndex_ )
13974 {
13975 }
13976
13977 GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs )
13978 {
13979 memcpy( this, &rhs, sizeof(GraphicsPipelineCreateInfo) );
13980 }
13981
13982 GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs )
13983 {
13984 memcpy( this, &rhs, sizeof(GraphicsPipelineCreateInfo) );
13985 return *this;
13986 }
13987
13988 GraphicsPipelineCreateInfo& setSType( StructureType sType_ )
13989 {
13990 sType = sType_;
13991 return *this;
13992 }
13993
13994 GraphicsPipelineCreateInfo& setPNext( const void* pNext_ )
13995 {
13996 pNext = pNext_;
13997 return *this;
13998 }
13999
14000 GraphicsPipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
14001 {
14002 flags = flags_;
14003 return *this;
14004 }
14005
14006 GraphicsPipelineCreateInfo& setStageCount( uint32_t stageCount_ )
14007 {
14008 stageCount = stageCount_;
14009 return *this;
14010 }
14011
14012 GraphicsPipelineCreateInfo& setPStages( const PipelineShaderStageCreateInfo* pStages_ )
14013 {
14014 pStages = pStages_;
14015 return *this;
14016 }
14017
14018 GraphicsPipelineCreateInfo& setPVertexInputState( const PipelineVertexInputStateCreateInfo* pVertexInputState_ )
14019 {
14020 pVertexInputState = pVertexInputState_;
14021 return *this;
14022 }
14023
14024 GraphicsPipelineCreateInfo& setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ )
14025 {
14026 pInputAssemblyState = pInputAssemblyState_;
14027 return *this;
14028 }
14029
14030 GraphicsPipelineCreateInfo& setPTessellationState( const PipelineTessellationStateCreateInfo* pTessellationState_ )
14031 {
14032 pTessellationState = pTessellationState_;
14033 return *this;
14034 }
14035
14036 GraphicsPipelineCreateInfo& setPViewportState( const PipelineViewportStateCreateInfo* pViewportState_ )
14037 {
14038 pViewportState = pViewportState_;
14039 return *this;
14040 }
14041
14042 GraphicsPipelineCreateInfo& setPRasterizationState( const PipelineRasterizationStateCreateInfo* pRasterizationState_ )
14043 {
14044 pRasterizationState = pRasterizationState_;
14045 return *this;
14046 }
14047
14048 GraphicsPipelineCreateInfo& setPMultisampleState( const PipelineMultisampleStateCreateInfo* pMultisampleState_ )
14049 {
14050 pMultisampleState = pMultisampleState_;
14051 return *this;
14052 }
14053
14054 GraphicsPipelineCreateInfo& setPDepthStencilState( const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ )
14055 {
14056 pDepthStencilState = pDepthStencilState_;
14057 return *this;
14058 }
14059
14060 GraphicsPipelineCreateInfo& setPColorBlendState( const PipelineColorBlendStateCreateInfo* pColorBlendState_ )
14061 {
14062 pColorBlendState = pColorBlendState_;
14063 return *this;
14064 }
14065
14066 GraphicsPipelineCreateInfo& setPDynamicState( const PipelineDynamicStateCreateInfo* pDynamicState_ )
14067 {
14068 pDynamicState = pDynamicState_;
14069 return *this;
14070 }
14071
14072 GraphicsPipelineCreateInfo& setLayout( PipelineLayout layout_ )
14073 {
14074 layout = layout_;
14075 return *this;
14076 }
14077
14078 GraphicsPipelineCreateInfo& setRenderPass( RenderPass renderPass_ )
14079 {
14080 renderPass = renderPass_;
14081 return *this;
14082 }
14083
14084 GraphicsPipelineCreateInfo& setSubpass( uint32_t subpass_ )
14085 {
14086 subpass = subpass_;
14087 return *this;
14088 }
14089
14090 GraphicsPipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
14091 {
14092 basePipelineHandle = basePipelineHandle_;
14093 return *this;
14094 }
14095
14096 GraphicsPipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
14097 {
14098 basePipelineIndex = basePipelineIndex_;
14099 return *this;
14100 }
14101
14102 operator const VkGraphicsPipelineCreateInfo&() const
14103 {
14104 return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>(this);
14105 }
14106
14107 bool operator==( GraphicsPipelineCreateInfo const& rhs ) const
14108 {
14109 return ( sType == rhs.sType )
14110 && ( pNext == rhs.pNext )
14111 && ( flags == rhs.flags )
14112 && ( stageCount == rhs.stageCount )
14113 && ( pStages == rhs.pStages )
14114 && ( pVertexInputState == rhs.pVertexInputState )
14115 && ( pInputAssemblyState == rhs.pInputAssemblyState )
14116 && ( pTessellationState == rhs.pTessellationState )
14117 && ( pViewportState == rhs.pViewportState )
14118 && ( pRasterizationState == rhs.pRasterizationState )
14119 && ( pMultisampleState == rhs.pMultisampleState )
14120 && ( pDepthStencilState == rhs.pDepthStencilState )
14121 && ( pColorBlendState == rhs.pColorBlendState )
14122 && ( pDynamicState == rhs.pDynamicState )
14123 && ( layout == rhs.layout )
14124 && ( renderPass == rhs.renderPass )
14125 && ( subpass == rhs.subpass )
14126 && ( basePipelineHandle == rhs.basePipelineHandle )
14127 && ( basePipelineIndex == rhs.basePipelineIndex );
14128 }
14129
14130 bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const
14131 {
14132 return !operator==( rhs );
14133 }
14134
14135 private:
14136 StructureType sType;
14137
14138 public:
14139 const void* pNext;
14140 PipelineCreateFlags flags;
14141 uint32_t stageCount;
14142 const PipelineShaderStageCreateInfo* pStages;
14143 const PipelineVertexInputStateCreateInfo* pVertexInputState;
14144 const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
14145 const PipelineTessellationStateCreateInfo* pTessellationState;
14146 const PipelineViewportStateCreateInfo* pViewportState;
14147 const PipelineRasterizationStateCreateInfo* pRasterizationState;
14148 const PipelineMultisampleStateCreateInfo* pMultisampleState;
14149 const PipelineDepthStencilStateCreateInfo* pDepthStencilState;
14150 const PipelineColorBlendStateCreateInfo* pColorBlendState;
14151 const PipelineDynamicStateCreateInfo* pDynamicState;
14152 PipelineLayout layout;
14153 RenderPass renderPass;
14154 uint32_t subpass;
14155 Pipeline basePipelineHandle;
14156 int32_t basePipelineIndex;
14157 };
14158 static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
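  // Usage sketch (illustrative only): assembling a GraphicsPipelineCreateInfo from
  // previously filled state structures and creating the pipeline. The *State
  // variables, shaderStages, pipelineLayout and renderPass are hypothetical
  // application objects; Device::createGraphicsPipeline is assumed from its
  // declaration elsewhere in this header (enhanced mode shown).
  //
  //   vk::GraphicsPipelineCreateInfo pipelineInfo = vk::GraphicsPipelineCreateInfo()
  //     .setStageCount( static_cast<uint32_t>( shaderStages.size() ) )
  //     .setPStages( shaderStages.data() )
  //     .setPVertexInputState( &vertexInputState )
  //     .setPInputAssemblyState( &inputAssemblyState )
  //     .setPViewportState( &viewportState )
  //     .setPRasterizationState( &rasterizationState )
  //     .setPMultisampleState( &multisampleState )
  //     .setPColorBlendState( &colorBlendState )
  //     .setLayout( pipelineLayout )
  //     .setRenderPass( renderPass )
  //     .setSubpass( 0 );
  //   vk::Pipeline pipeline = device.createGraphicsPipeline( vk::PipelineCache(), pipelineInfo );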
14159
14160 struct PhysicalDeviceLimits
14161 {
14162 operator const VkPhysicalDeviceLimits&() const
14163 {
14164 return *reinterpret_cast<const VkPhysicalDeviceLimits*>(this);
14165 }
14166
14167 bool operator==( PhysicalDeviceLimits const& rhs ) const
14168 {
14169 return ( maxImageDimension1D == rhs.maxImageDimension1D )
14170 && ( maxImageDimension2D == rhs.maxImageDimension2D )
14171 && ( maxImageDimension3D == rhs.maxImageDimension3D )
14172 && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
14173 && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
14174 && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
14175 && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
14176 && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
14177 && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
14178 && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
14179 && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
14180 && ( bufferImageGranularity == rhs.bufferImageGranularity )
14181 && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
14182 && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
14183 && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
14184 && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
14185 && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
14186 && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
14187 && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
14188 && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
14189 && ( maxPerStageResources == rhs.maxPerStageResources )
14190 && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
14191 && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
14192 && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
14193 && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
14194 && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
14195 && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
14196 && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
14197 && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
14198 && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
14199 && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
14200 && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
14201 && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
14202 && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
14203 && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
14204 && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
14205 && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
14206 && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
14207 && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
14208 && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
14209 && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
14210 && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
14211 && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
14212 && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
14213 && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
14214 && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
14215 && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
14216 && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
14217 && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
14218 && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
14219 && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
14220 && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
14221 && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
14222 && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
14223 && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
14224 && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
14225 && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
14226 && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
14227 && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
14228 && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
14229 && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
14230 && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
14231 && ( maxViewports == rhs.maxViewports )
14232 && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
14233 && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
14234 && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
14235 && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
14236 && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
14237 && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
14238 && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
14239 && ( minTexelOffset == rhs.minTexelOffset )
14240 && ( maxTexelOffset == rhs.maxTexelOffset )
14241 && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
14242 && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
14243 && ( minInterpolationOffset == rhs.minInterpolationOffset )
14244 && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
14245 && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
14246 && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
14247 && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
14248 && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
14249 && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
14250 && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
14251 && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
14252 && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
14253 && ( maxColorAttachments == rhs.maxColorAttachments )
14254 && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
14255 && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
14256 && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
14257 && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
14258 && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
14259 && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
14260 && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
14261 && ( timestampPeriod == rhs.timestampPeriod )
14262 && ( maxClipDistances == rhs.maxClipDistances )
14263 && ( maxCullDistances == rhs.maxCullDistances )
14264 && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
14265 && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
14266 && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
14267 && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
14268 && ( pointSizeGranularity == rhs.pointSizeGranularity )
14269 && ( lineWidthGranularity == rhs.lineWidthGranularity )
14270 && ( strictLines == rhs.strictLines )
14271 && ( standardSampleLocations == rhs.standardSampleLocations )
14272 && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
14273 && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
14274 && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
14275 }
14276
14277 bool operator!=( PhysicalDeviceLimits const& rhs ) const
14278 {
14279 return !operator==( rhs );
14280 }
14281
14282 uint32_t maxImageDimension1D;
14283 uint32_t maxImageDimension2D;
14284 uint32_t maxImageDimension3D;
14285 uint32_t maxImageDimensionCube;
14286 uint32_t maxImageArrayLayers;
14287 uint32_t maxTexelBufferElements;
14288 uint32_t maxUniformBufferRange;
14289 uint32_t maxStorageBufferRange;
14290 uint32_t maxPushConstantsSize;
14291 uint32_t maxMemoryAllocationCount;
14292 uint32_t maxSamplerAllocationCount;
14293 DeviceSize bufferImageGranularity;
14294 DeviceSize sparseAddressSpaceSize;
14295 uint32_t maxBoundDescriptorSets;
14296 uint32_t maxPerStageDescriptorSamplers;
14297 uint32_t maxPerStageDescriptorUniformBuffers;
14298 uint32_t maxPerStageDescriptorStorageBuffers;
14299 uint32_t maxPerStageDescriptorSampledImages;
14300 uint32_t maxPerStageDescriptorStorageImages;
14301 uint32_t maxPerStageDescriptorInputAttachments;
14302 uint32_t maxPerStageResources;
14303 uint32_t maxDescriptorSetSamplers;
14304 uint32_t maxDescriptorSetUniformBuffers;
14305 uint32_t maxDescriptorSetUniformBuffersDynamic;
14306 uint32_t maxDescriptorSetStorageBuffers;
14307 uint32_t maxDescriptorSetStorageBuffersDynamic;
14308 uint32_t maxDescriptorSetSampledImages;
14309 uint32_t maxDescriptorSetStorageImages;
14310 uint32_t maxDescriptorSetInputAttachments;
14311 uint32_t maxVertexInputAttributes;
14312 uint32_t maxVertexInputBindings;
14313 uint32_t maxVertexInputAttributeOffset;
14314 uint32_t maxVertexInputBindingStride;
14315 uint32_t maxVertexOutputComponents;
14316 uint32_t maxTessellationGenerationLevel;
14317 uint32_t maxTessellationPatchSize;
14318 uint32_t maxTessellationControlPerVertexInputComponents;
14319 uint32_t maxTessellationControlPerVertexOutputComponents;
14320 uint32_t maxTessellationControlPerPatchOutputComponents;
14321 uint32_t maxTessellationControlTotalOutputComponents;
14322 uint32_t maxTessellationEvaluationInputComponents;
14323 uint32_t maxTessellationEvaluationOutputComponents;
14324 uint32_t maxGeometryShaderInvocations;
14325 uint32_t maxGeometryInputComponents;
14326 uint32_t maxGeometryOutputComponents;
14327 uint32_t maxGeometryOutputVertices;
14328 uint32_t maxGeometryTotalOutputComponents;
14329 uint32_t maxFragmentInputComponents;
14330 uint32_t maxFragmentOutputAttachments;
14331 uint32_t maxFragmentDualSrcAttachments;
14332 uint32_t maxFragmentCombinedOutputResources;
14333 uint32_t maxComputeSharedMemorySize;
14334 uint32_t maxComputeWorkGroupCount[3];
14335 uint32_t maxComputeWorkGroupInvocations;
14336 uint32_t maxComputeWorkGroupSize[3];
14337 uint32_t subPixelPrecisionBits;
14338 uint32_t subTexelPrecisionBits;
14339 uint32_t mipmapPrecisionBits;
14340 uint32_t maxDrawIndexedIndexValue;
14341 uint32_t maxDrawIndirectCount;
14342 float maxSamplerLodBias;
14343 float maxSamplerAnisotropy;
14344 uint32_t maxViewports;
14345 uint32_t maxViewportDimensions[2];
14346 float viewportBoundsRange[2];
14347 uint32_t viewportSubPixelBits;
14348 size_t minMemoryMapAlignment;
14349 DeviceSize minTexelBufferOffsetAlignment;
14350 DeviceSize minUniformBufferOffsetAlignment;
14351 DeviceSize minStorageBufferOffsetAlignment;
14352 int32_t minTexelOffset;
14353 uint32_t maxTexelOffset;
14354 int32_t minTexelGatherOffset;
14355 uint32_t maxTexelGatherOffset;
14356 float minInterpolationOffset;
14357 float maxInterpolationOffset;
14358 uint32_t subPixelInterpolationOffsetBits;
14359 uint32_t maxFramebufferWidth;
14360 uint32_t maxFramebufferHeight;
14361 uint32_t maxFramebufferLayers;
14362 SampleCountFlags framebufferColorSampleCounts;
14363 SampleCountFlags framebufferDepthSampleCounts;
14364 SampleCountFlags framebufferStencilSampleCounts;
14365 SampleCountFlags framebufferNoAttachmentsSampleCounts;
14366 uint32_t maxColorAttachments;
14367 SampleCountFlags sampledImageColorSampleCounts;
14368 SampleCountFlags sampledImageIntegerSampleCounts;
14369 SampleCountFlags sampledImageDepthSampleCounts;
14370 SampleCountFlags sampledImageStencilSampleCounts;
14371 SampleCountFlags storageImageSampleCounts;
14372 uint32_t maxSampleMaskWords;
14373 Bool32 timestampComputeAndGraphics;
14374 float timestampPeriod;
14375 uint32_t maxClipDistances;
14376 uint32_t maxCullDistances;
14377 uint32_t maxCombinedClipAndCullDistances;
14378 uint32_t discreteQueuePriorities;
14379 float pointSizeRange[2];
14380 float lineWidthRange[2];
14381 float pointSizeGranularity;
14382 float lineWidthGranularity;
14383 Bool32 strictLines;
14384 Bool32 standardSampleLocations;
14385 DeviceSize optimalBufferCopyOffsetAlignment;
14386 DeviceSize optimalBufferCopyRowPitchAlignment;
14387 DeviceSize nonCoherentAtomSize;
14388 };
14389 static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
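  // Illustrative usage sketch (not part of the generated registry output):
  // validating a compute dispatch against the reported limits. The 'properties'
  // instance would typically come from PhysicalDevice::getProperties(), declared
  // later in this header; the group counts are example values.
  //
  //   vk::PhysicalDeviceLimits const& limits = properties.limits;
  //   bool fits = ( groupCountX <= limits.maxComputeWorkGroupCount[0] )
  //            && ( groupCountY <= limits.maxComputeWorkGroupCount[1] )
  //            && ( groupCountZ <= limits.maxComputeWorkGroupCount[2] );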
14390
14391 struct PhysicalDeviceProperties
14392 {
14393 operator const VkPhysicalDeviceProperties&() const
14394 {
14395 return *reinterpret_cast<const VkPhysicalDeviceProperties*>(this);
14396 }
14397
14398 bool operator==( PhysicalDeviceProperties const& rhs ) const
14399 {
14400 return ( apiVersion == rhs.apiVersion )
14401 && ( driverVersion == rhs.driverVersion )
14402 && ( vendorID == rhs.vendorID )
14403 && ( deviceID == rhs.deviceID )
14404 && ( deviceType == rhs.deviceType )
14405 && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
14406 && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
14407 && ( limits == rhs.limits )
14408 && ( sparseProperties == rhs.sparseProperties );
14409 }
14410
14411 bool operator!=( PhysicalDeviceProperties const& rhs ) const
14412 {
14413 return !operator==( rhs );
14414 }
14415
14416 uint32_t apiVersion;
14417 uint32_t driverVersion;
14418 uint32_t vendorID;
14419 uint32_t deviceID;
14420 PhysicalDeviceType deviceType;
14421 char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
14422 uint8_t pipelineCacheUUID[VK_UUID_SIZE];
14423 PhysicalDeviceLimits limits;
14424 PhysicalDeviceSparseProperties sparseProperties;
14425 };
14426 static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
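  // Illustrative usage sketch (not part of the generated registry output):
  // decoding the packed apiVersion field with the VK_VERSION_* macros from
  // vulkan.h. The 'physicalDevice' handle is an assumption for the example;
  // PhysicalDevice::getProperties() is declared later in this header.
  //
  //   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
  //   uint32_t major = VK_VERSION_MAJOR( props.apiVersion );
  //   uint32_t minor = VK_VERSION_MINOR( props.apiVersion );
  //   uint32_t patch = VK_VERSION_PATCH( props.apiVersion );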
14427
14428   struct PhysicalDeviceProperties2KHR
14429 {
14430 operator const VkPhysicalDeviceProperties2KHR&() const
14431 {
14432 return *reinterpret_cast<const VkPhysicalDeviceProperties2KHR*>(this);
14433 }
14434
14435 bool operator==( PhysicalDeviceProperties2KHR const& rhs ) const
14436 {
14437 return ( sType == rhs.sType )
14438 && ( pNext == rhs.pNext )
14439 && ( properties == rhs.properties );
14440 }
14441
14442 bool operator!=( PhysicalDeviceProperties2KHR const& rhs ) const
14443 {
14444 return !operator==( rhs );
14445 }
14446
14447 private:
14448 StructureType sType;
14449
14450 public:
14451 void* pNext;
14452 PhysicalDeviceProperties properties;
14453 };
14454 static_assert( sizeof( PhysicalDeviceProperties2KHR ) == sizeof( VkPhysicalDeviceProperties2KHR ), "struct and wrapper have different size!" );
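  // Illustrative usage sketch (not part of the generated registry output):
  // querying the same data through VK_KHR_get_physical_device_properties2.
  // PhysicalDevice::getProperties2KHR() is declared later in this header and
  // assumes the extension was enabled when the instance was created.
  //
  //   vk::PhysicalDeviceProperties2KHR props2 = physicalDevice.getProperties2KHR();
  //   uint32_t vendorID = props2.properties.vendorID;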
14455
14456 struct ImageFormatProperties2KHR
14457 {
14458 operator const VkImageFormatProperties2KHR&() const
14459 {
14460 return *reinterpret_cast<const VkImageFormatProperties2KHR*>(this);
14461 }
14462
14463 bool operator==( ImageFormatProperties2KHR const& rhs ) const
14464 {
14465 return ( sType == rhs.sType )
14466 && ( pNext == rhs.pNext )
14467 && ( imageFormatProperties == rhs.imageFormatProperties );
14468 }
14469
14470 bool operator!=( ImageFormatProperties2KHR const& rhs ) const
14471 {
14472 return !operator==( rhs );
14473 }
14474
14475 private:
14476 StructureType sType;
14477
14478 public:
14479 void* pNext;
14480 ImageFormatProperties imageFormatProperties;
14481 };
14482 static_assert( sizeof( ImageFormatProperties2KHR ) == sizeof( VkImageFormatProperties2KHR ), "struct and wrapper have different size!" );
14483
14484 struct PhysicalDeviceSparseImageFormatInfo2KHR
14485 {
14486 PhysicalDeviceSparseImageFormatInfo2KHR( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageUsageFlags usage_ = ImageUsageFlags(), ImageTiling tiling_ = ImageTiling::eOptimal )
14487 : sType( StructureType::ePhysicalDeviceSparseImageFormatInfo2KHR )
14488 , pNext( nullptr )
14489 , format( format_ )
14490 , type( type_ )
14491 , samples( samples_ )
14492 , usage( usage_ )
14493 , tiling( tiling_ )
14494 {
14495 }
14496
14497 PhysicalDeviceSparseImageFormatInfo2KHR( VkPhysicalDeviceSparseImageFormatInfo2KHR const & rhs )
14498 {
14499 memcpy( this, &rhs, sizeof(PhysicalDeviceSparseImageFormatInfo2KHR) );
14500 }
14501
14502 PhysicalDeviceSparseImageFormatInfo2KHR& operator=( VkPhysicalDeviceSparseImageFormatInfo2KHR const & rhs )
14503 {
14504 memcpy( this, &rhs, sizeof(PhysicalDeviceSparseImageFormatInfo2KHR) );
14505 return *this;
14506 }
14507
14508 PhysicalDeviceSparseImageFormatInfo2KHR& setSType( StructureType sType_ )
14509 {
14510 sType = sType_;
14511 return *this;
14512 }
14513
14514 PhysicalDeviceSparseImageFormatInfo2KHR& setPNext( const void* pNext_ )
14515 {
14516 pNext = pNext_;
14517 return *this;
14518 }
14519
14520 PhysicalDeviceSparseImageFormatInfo2KHR& setFormat( Format format_ )
14521 {
14522 format = format_;
14523 return *this;
14524 }
14525
14526 PhysicalDeviceSparseImageFormatInfo2KHR& setType( ImageType type_ )
14527 {
14528 type = type_;
14529 return *this;
14530 }
14531
14532 PhysicalDeviceSparseImageFormatInfo2KHR& setSamples( SampleCountFlagBits samples_ )
14533 {
14534 samples = samples_;
14535 return *this;
14536 }
14537
14538 PhysicalDeviceSparseImageFormatInfo2KHR& setUsage( ImageUsageFlags usage_ )
14539 {
14540 usage = usage_;
14541 return *this;
14542 }
14543
14544 PhysicalDeviceSparseImageFormatInfo2KHR& setTiling( ImageTiling tiling_ )
14545 {
14546 tiling = tiling_;
14547 return *this;
14548 }
14549
14550 operator const VkPhysicalDeviceSparseImageFormatInfo2KHR&() const
14551 {
14552 return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>(this);
14553 }
14554
14555 bool operator==( PhysicalDeviceSparseImageFormatInfo2KHR const& rhs ) const
14556 {
14557 return ( sType == rhs.sType )
14558 && ( pNext == rhs.pNext )
14559 && ( format == rhs.format )
14560 && ( type == rhs.type )
14561 && ( samples == rhs.samples )
14562 && ( usage == rhs.usage )
14563 && ( tiling == rhs.tiling );
14564 }
14565
14566 bool operator!=( PhysicalDeviceSparseImageFormatInfo2KHR const& rhs ) const
14567 {
14568 return !operator==( rhs );
14569 }
14570
14571 private:
14572 StructureType sType;
14573
14574 public:
14575 const void* pNext;
14576 Format format;
14577 ImageType type;
14578 SampleCountFlagBits samples;
14579 ImageUsageFlags usage;
14580 ImageTiling tiling;
14581 };
14582 static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2KHR ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2KHR ), "struct and wrapper have different size!" );
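  // Illustrative usage sketch (not part of the generated registry output):
  // filling the sparse-image query input with the fluent setters. The format
  // and usage are example values only.
  //
  //   vk::PhysicalDeviceSparseImageFormatInfo2KHR info = vk::PhysicalDeviceSparseImageFormatInfo2KHR()
  //     .setFormat( vk::Format::eR8G8B8A8Unorm )
  //     .setType( vk::ImageType::e2D )
  //     .setSamples( vk::SampleCountFlagBits::e1 )
  //     .setUsage( vk::ImageUsageFlagBits::eSampled )
  //     .setTiling( vk::ImageTiling::eOptimal );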
14583
14584   enum class AttachmentDescriptionFlagBits
14585 {
14586 eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
14587 };
14588
14589 using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
14590
14591   VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
14592   {
14593 return AttachmentDescriptionFlags( bit0 ) | bit1;
14594 }
14595
14596   VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
14597 {
14598 return ~( AttachmentDescriptionFlags( bits ) );
14599 }
14600
14601 template <> struct FlagTraits<AttachmentDescriptionFlagBits>
14602 {
14603 enum
14604 {
14605 allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
14606 };
14607 };
14608
14609   struct AttachmentDescription
14610 {
14611 AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined )
14612 : flags( flags_ )
14613 , format( format_ )
14614 , samples( samples_ )
14615 , loadOp( loadOp_ )
14616 , storeOp( storeOp_ )
14617 , stencilLoadOp( stencilLoadOp_ )
14618 , stencilStoreOp( stencilStoreOp_ )
14619 , initialLayout( initialLayout_ )
14620 , finalLayout( finalLayout_ )
14621 {
14622 }
14623
14624 AttachmentDescription( VkAttachmentDescription const & rhs )
14625 {
14626 memcpy( this, &rhs, sizeof(AttachmentDescription) );
14627 }
14628
14629 AttachmentDescription& operator=( VkAttachmentDescription const & rhs )
14630 {
14631 memcpy( this, &rhs, sizeof(AttachmentDescription) );
14632 return *this;
14633 }
14634
14635 AttachmentDescription& setFlags( AttachmentDescriptionFlags flags_ )
14636 {
14637 flags = flags_;
14638 return *this;
14639 }
14640
14641 AttachmentDescription& setFormat( Format format_ )
14642 {
14643 format = format_;
14644 return *this;
14645 }
14646
14647 AttachmentDescription& setSamples( SampleCountFlagBits samples_ )
14648 {
14649 samples = samples_;
14650 return *this;
14651 }
14652
14653 AttachmentDescription& setLoadOp( AttachmentLoadOp loadOp_ )
14654 {
14655 loadOp = loadOp_;
14656 return *this;
14657 }
14658
14659 AttachmentDescription& setStoreOp( AttachmentStoreOp storeOp_ )
14660 {
14661 storeOp = storeOp_;
14662 return *this;
14663 }
14664
14665 AttachmentDescription& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ )
14666 {
14667 stencilLoadOp = stencilLoadOp_;
14668 return *this;
14669 }
14670
14671 AttachmentDescription& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ )
14672 {
14673 stencilStoreOp = stencilStoreOp_;
14674 return *this;
14675 }
14676
14677 AttachmentDescription& setInitialLayout( ImageLayout initialLayout_ )
14678 {
14679 initialLayout = initialLayout_;
14680 return *this;
14681 }
14682
14683 AttachmentDescription& setFinalLayout( ImageLayout finalLayout_ )
14684 {
14685 finalLayout = finalLayout_;
14686 return *this;
14687 }
14688
14689 operator const VkAttachmentDescription&() const
14690 {
14691 return *reinterpret_cast<const VkAttachmentDescription*>(this);
14692 }
14693
14694 bool operator==( AttachmentDescription const& rhs ) const
14695 {
14696 return ( flags == rhs.flags )
14697 && ( format == rhs.format )
14698 && ( samples == rhs.samples )
14699 && ( loadOp == rhs.loadOp )
14700 && ( storeOp == rhs.storeOp )
14701 && ( stencilLoadOp == rhs.stencilLoadOp )
14702 && ( stencilStoreOp == rhs.stencilStoreOp )
14703 && ( initialLayout == rhs.initialLayout )
14704 && ( finalLayout == rhs.finalLayout );
14705 }
14706
14707 bool operator!=( AttachmentDescription const& rhs ) const
14708 {
14709 return !operator==( rhs );
14710 }
14711
14712 AttachmentDescriptionFlags flags;
14713 Format format;
14714 SampleCountFlagBits samples;
14715 AttachmentLoadOp loadOp;
14716 AttachmentStoreOp storeOp;
14717 AttachmentLoadOp stencilLoadOp;
14718 AttachmentStoreOp stencilStoreOp;
14719 ImageLayout initialLayout;
14720 ImageLayout finalLayout;
14721 };
14722 static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
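  // Illustrative usage sketch (not part of the generated registry output):
  // describing a color attachment that is cleared on load and handed to the
  // presentation engine at the end of the pass. The surface format is an
  // example value.
  //
  //   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription()
  //     .setFormat( vk::Format::eB8G8R8A8Unorm )
  //     .setSamples( vk::SampleCountFlagBits::e1 )
  //     .setLoadOp( vk::AttachmentLoadOp::eClear )
  //     .setStoreOp( vk::AttachmentStoreOp::eStore )
  //     .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
  //     .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
  //     .setInitialLayout( vk::ImageLayout::eUndefined )
  //     .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );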
14723
14724 enum class StencilFaceFlagBits
14725 {
14726 eFront = VK_STENCIL_FACE_FRONT_BIT,
14727 eBack = VK_STENCIL_FACE_BACK_BIT,
14728 eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
14729 };
14730
14731 using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
14732
14733   VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
14734   {
14735 return StencilFaceFlags( bit0 ) | bit1;
14736 }
14737
14738   VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits )
14739 {
14740 return ~( StencilFaceFlags( bits ) );
14741 }
14742
14743 template <> struct FlagTraits<StencilFaceFlagBits>
14744 {
14745 enum
14746 {
14747 allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack)
14748 };
14749 };
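  // Illustrative usage sketch (not part of the generated registry output):
  // the operators above allow individual bits to be combined into a mask,
  // for example when applying stencil state to both faces.
  //
  //   vk::StencilFaceFlags bothFaces = vk::StencilFaceFlagBits::eFront | vk::StencilFaceFlagBits::eBack;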
14750
14751   enum class DescriptorPoolCreateFlagBits
14752 {
14753 eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
14754 };
14755
14756 using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
14757
14758   VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
14759   {
14760 return DescriptorPoolCreateFlags( bit0 ) | bit1;
14761 }
14762
14763   VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
14764 {
14765 return ~( DescriptorPoolCreateFlags( bits ) );
14766 }
14767
14768 template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
14769 {
14770 enum
14771 {
14772 allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet)
14773 };
14774 };
14775
14776   struct DescriptorPoolCreateInfo
14777 {
14778 DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), uint32_t maxSets_ = 0, uint32_t poolSizeCount_ = 0, const DescriptorPoolSize* pPoolSizes_ = nullptr )
14779 : sType( StructureType::eDescriptorPoolCreateInfo )
14780 , pNext( nullptr )
14781 , flags( flags_ )
14782 , maxSets( maxSets_ )
14783 , poolSizeCount( poolSizeCount_ )
14784 , pPoolSizes( pPoolSizes_ )
14785 {
14786 }
14787
14788 DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs )
14789 {
14790 memcpy( this, &rhs, sizeof(DescriptorPoolCreateInfo) );
14791 }
14792
14793 DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs )
14794 {
14795 memcpy( this, &rhs, sizeof(DescriptorPoolCreateInfo) );
14796 return *this;
14797 }
14798
14799 DescriptorPoolCreateInfo& setSType( StructureType sType_ )
14800 {
14801 sType = sType_;
14802 return *this;
14803 }
14804
14805 DescriptorPoolCreateInfo& setPNext( const void* pNext_ )
14806 {
14807 pNext = pNext_;
14808 return *this;
14809 }
14810
14811 DescriptorPoolCreateInfo& setFlags( DescriptorPoolCreateFlags flags_ )
14812 {
14813 flags = flags_;
14814 return *this;
14815 }
14816
14817 DescriptorPoolCreateInfo& setMaxSets( uint32_t maxSets_ )
14818 {
14819 maxSets = maxSets_;
14820 return *this;
14821 }
14822
14823 DescriptorPoolCreateInfo& setPoolSizeCount( uint32_t poolSizeCount_ )
14824 {
14825 poolSizeCount = poolSizeCount_;
14826 return *this;
14827 }
14828
14829 DescriptorPoolCreateInfo& setPPoolSizes( const DescriptorPoolSize* pPoolSizes_ )
14830 {
14831 pPoolSizes = pPoolSizes_;
14832 return *this;
14833 }
14834
14835 operator const VkDescriptorPoolCreateInfo&() const
14836 {
14837 return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>(this);
14838 }
14839
14840 bool operator==( DescriptorPoolCreateInfo const& rhs ) const
14841 {
14842 return ( sType == rhs.sType )
14843 && ( pNext == rhs.pNext )
14844 && ( flags == rhs.flags )
14845 && ( maxSets == rhs.maxSets )
14846 && ( poolSizeCount == rhs.poolSizeCount )
14847 && ( pPoolSizes == rhs.pPoolSizes );
14848 }
14849
14850 bool operator!=( DescriptorPoolCreateInfo const& rhs ) const
14851 {
14852 return !operator==( rhs );
14853 }
14854
14855 private:
14856 StructureType sType;
14857
14858 public:
14859 const void* pNext;
14860 DescriptorPoolCreateFlags flags;
14861 uint32_t maxSets;
14862 uint32_t poolSizeCount;
14863 const DescriptorPoolSize* pPoolSizes;
14864 };
14865 static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
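  // Illustrative usage sketch (not part of the generated registry output):
  // a small pool that allows individual sets to be freed. DescriptorPoolSize
  // is defined earlier in this header; the counts are example values.
  //
  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
  //   vk::DescriptorPoolCreateInfo poolInfo = vk::DescriptorPoolCreateInfo()
  //     .setFlags( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
  //     .setMaxSets( 16 )
  //     .setPoolSizeCount( 1 )
  //     .setPPoolSizes( &poolSize );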
14866
14867 enum class DependencyFlagBits
14868 {
14869 eByRegion = VK_DEPENDENCY_BY_REGION_BIT
14870 };
14871
14872 using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
14873
14874   VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
14875   {
14876 return DependencyFlags( bit0 ) | bit1;
14877 }
14878
14879   VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
14880 {
14881 return ~( DependencyFlags( bits ) );
14882 }
14883
14884 template <> struct FlagTraits<DependencyFlagBits>
14885 {
14886 enum
14887 {
14888 allFlags = VkFlags(DependencyFlagBits::eByRegion)
14889 };
14890 };
14891
14892 struct SubpassDependency
14893 {
14894 SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
14895 : srcSubpass( srcSubpass_ )
14896 , dstSubpass( dstSubpass_ )
14897 , srcStageMask( srcStageMask_ )
14898 , dstStageMask( dstStageMask_ )
14899 , srcAccessMask( srcAccessMask_ )
14900 , dstAccessMask( dstAccessMask_ )
14901 , dependencyFlags( dependencyFlags_ )
14902 {
14903 }
14904
14905 SubpassDependency( VkSubpassDependency const & rhs )
14906 {
14907 memcpy( this, &rhs, sizeof(SubpassDependency) );
14908 }
14909
14910 SubpassDependency& operator=( VkSubpassDependency const & rhs )
14911 {
14912 memcpy( this, &rhs, sizeof(SubpassDependency) );
14913 return *this;
14914 }
14915
14916 SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
14917 {
14918 srcSubpass = srcSubpass_;
14919 return *this;
14920 }
14921
14922 SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
14923 {
14924 dstSubpass = dstSubpass_;
14925 return *this;
14926 }
14927
14928 SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
14929 {
14930 srcStageMask = srcStageMask_;
14931 return *this;
14932 }
14933
14934 SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
14935 {
14936 dstStageMask = dstStageMask_;
14937 return *this;
14938 }
14939
14940 SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
14941 {
14942 srcAccessMask = srcAccessMask_;
14943 return *this;
14944 }
14945
14946 SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
14947 {
14948 dstAccessMask = dstAccessMask_;
14949 return *this;
14950 }
14951
14952 SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
14953 {
14954 dependencyFlags = dependencyFlags_;
14955 return *this;
14956 }
14957
14958 operator const VkSubpassDependency&() const
14959 {
14960 return *reinterpret_cast<const VkSubpassDependency*>(this);
14961 }
14962
14963 bool operator==( SubpassDependency const& rhs ) const
14964 {
14965 return ( srcSubpass == rhs.srcSubpass )
14966 && ( dstSubpass == rhs.dstSubpass )
14967 && ( srcStageMask == rhs.srcStageMask )
14968 && ( dstStageMask == rhs.dstStageMask )
14969 && ( srcAccessMask == rhs.srcAccessMask )
14970 && ( dstAccessMask == rhs.dstAccessMask )
14971 && ( dependencyFlags == rhs.dependencyFlags );
14972 }
14973
14974 bool operator!=( SubpassDependency const& rhs ) const
14975 {
14976 return !operator==( rhs );
14977 }
14978
14979 uint32_t srcSubpass;
14980 uint32_t dstSubpass;
14981 PipelineStageFlags srcStageMask;
14982 PipelineStageFlags dstStageMask;
14983 AccessFlags srcAccessMask;
14984 AccessFlags dstAccessMask;
14985 DependencyFlags dependencyFlags;
14986 };
14987 static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
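  // Illustrative usage sketch (not part of the generated registry output):
  // a dependency from the implicit external subpass into subpass 0, covering
  // the color attachment output stage. The stage and access masks are example
  // values.
  //
  //   vk::SubpassDependency dependency = vk::SubpassDependency()
  //     .setSrcSubpass( VK_SUBPASS_EXTERNAL )
  //     .setDstSubpass( 0 )
  //     .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //     .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //     .setSrcAccessMask( vk::AccessFlags() )
  //     .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );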
14988
14989 struct RenderPassCreateInfo
14990 {
14991 RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
14992 : sType( StructureType::eRenderPassCreateInfo )
14993 , pNext( nullptr )
14994 , flags( flags_ )
14995 , attachmentCount( attachmentCount_ )
14996 , pAttachments( pAttachments_ )
14997 , subpassCount( subpassCount_ )
14998 , pSubpasses( pSubpasses_ )
14999 , dependencyCount( dependencyCount_ )
15000 , pDependencies( pDependencies_ )
15001 {
15002 }
15003
15004 RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
15005 {
15006 memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
15007 }
15008
15009 RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
15010 {
15011 memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
15012 return *this;
15013 }
15014
15015 RenderPassCreateInfo& setSType( StructureType sType_ )
15016 {
15017 sType = sType_;
15018 return *this;
15019 }
15020
15021 RenderPassCreateInfo& setPNext( const void* pNext_ )
15022 {
15023 pNext = pNext_;
15024 return *this;
15025 }
15026
15027 RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
15028 {
15029 flags = flags_;
15030 return *this;
15031 }
15032
15033 RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
15034 {
15035 attachmentCount = attachmentCount_;
15036 return *this;
15037 }
15038
15039 RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
15040 {
15041 pAttachments = pAttachments_;
15042 return *this;
15043 }
15044
15045 RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
15046 {
15047 subpassCount = subpassCount_;
15048 return *this;
15049 }
15050
15051 RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
15052 {
15053 pSubpasses = pSubpasses_;
15054 return *this;
15055 }
15056
15057 RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
15058 {
15059 dependencyCount = dependencyCount_;
15060 return *this;
15061 }
15062
15063 RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
15064 {
15065 pDependencies = pDependencies_;
15066 return *this;
15067 }
15068
15069 operator const VkRenderPassCreateInfo&() const
15070 {
15071 return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
15072 }
15073
15074 bool operator==( RenderPassCreateInfo const& rhs ) const
15075 {
15076 return ( sType == rhs.sType )
15077 && ( pNext == rhs.pNext )
15078 && ( flags == rhs.flags )
15079 && ( attachmentCount == rhs.attachmentCount )
15080 && ( pAttachments == rhs.pAttachments )
15081 && ( subpassCount == rhs.subpassCount )
15082 && ( pSubpasses == rhs.pSubpasses )
15083 && ( dependencyCount == rhs.dependencyCount )
15084 && ( pDependencies == rhs.pDependencies );
15085 }
15086
15087 bool operator!=( RenderPassCreateInfo const& rhs ) const
15088 {
15089 return !operator==( rhs );
15090 }
15091
15092 private:
15093 StructureType sType;
15094
15095 public:
15096 const void* pNext;
15097 RenderPassCreateFlags flags;
15098 uint32_t attachmentCount;
15099 const AttachmentDescription* pAttachments;
15100 uint32_t subpassCount;
15101 const SubpassDescription* pSubpasses;
15102 uint32_t dependencyCount;
15103 const SubpassDependency* pDependencies;
15104 };
15105 static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
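  // Illustrative usage sketch (not part of the generated registry output):
  // assembling a single-subpass render pass from the attachment and dependency
  // sketched above. 'subpass' would be a SubpassDescription (defined earlier in
  // this header) and 'device' a Device handle; both names are assumptions.
  //
  //   vk::RenderPassCreateInfo renderPassInfo = vk::RenderPassCreateInfo()
  //     .setAttachmentCount( 1 )
  //     .setPAttachments( &colorAttachment )
  //     .setSubpassCount( 1 )
  //     .setPSubpasses( &subpass )
  //     .setDependencyCount( 1 )
  //     .setPDependencies( &dependency );
  //   vk::RenderPass renderPass = device.createRenderPass( renderPassInfo );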
15106
15107 enum class PresentModeKHR
15108 {
15109 eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
15110 eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
15111 eFifo = VK_PRESENT_MODE_FIFO_KHR,
15112 eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR
15113 };
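  // Illustrative usage sketch (not part of the generated registry output):
  // eFifo is the only present mode guaranteed to be supported; preferring
  // eMailbox when it is reported is a common pattern. 'availableModes' is an
  // assumed result of PhysicalDevice::getSurfacePresentModesKHR().
  //
  //   vk::PresentModeKHR chosenMode = vk::PresentModeKHR::eFifo;
  //   for ( auto const& mode : availableModes )
  //   {
  //     if ( mode == vk::PresentModeKHR::eMailbox ) { chosenMode = mode; break; }
  //   }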
15114
15115 enum class ColorSpaceKHR
15116 {
15117     eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
15118 eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
15119 eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
15120 eScrgbLinearEXT = VK_COLOR_SPACE_SCRGB_LINEAR_EXT,
15121 eScrgbNonlinearEXT = VK_COLOR_SPACE_SCRGB_NONLINEAR_EXT,
15122 eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT,
15123 eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
15124 eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
15125 eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
15126 eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
15127 eBt2020NonlinearEXT = VK_COLOR_SPACE_BT2020_NONLINEAR_EXT,
15128 eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
15129 eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT
15130   };
15131
15132 struct SurfaceFormatKHR
15133 {
15134 operator const VkSurfaceFormatKHR&() const
15135 {
15136 return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
15137 }
15138
15139 bool operator==( SurfaceFormatKHR const& rhs ) const
15140 {
15141 return ( format == rhs.format )
15142 && ( colorSpace == rhs.colorSpace );
15143 }
15144
15145 bool operator!=( SurfaceFormatKHR const& rhs ) const
15146 {
15147 return !operator==( rhs );
15148 }
15149
15150 Format format;
15151 ColorSpaceKHR colorSpace;
15152 };
15153 static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
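  // Illustrative usage sketch (not part of the generated registry output):
  // choosing a surface format from the list returned by
  // PhysicalDevice::getSurfaceFormatsKHR(), declared later in this header.
  // The preferred format / color space pair is an example choice.
  //
  //   vk::SurfaceFormatKHR chosenFormat = formats[0];
  //   for ( auto const& f : formats )
  //   {
  //     if ( ( f.format == vk::Format::eB8G8R8A8Unorm ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
  //     {
  //       chosenFormat = f;
  //       break;
  //     }
  //   }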
15154
15155 enum class DisplayPlaneAlphaFlagBitsKHR
15156 {
15157 eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
15158 eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
15159 ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
15160 ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
15161 };
15162
15163 using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
15164
15165 VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
15166 {
15167 return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
15168 }
15169
15170 VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
15171 {
15172 return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
15173 }
15174
15175 template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
15176 {
15177 enum
15178 {
15179 allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
15180 };
15181 };
15182
15183 struct DisplayPlaneCapabilitiesKHR
15184 {
15185 operator const VkDisplayPlaneCapabilitiesKHR&() const
15186 {
15187 return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
15188 }
15189
15190 bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
15191 {
15192 return ( supportedAlpha == rhs.supportedAlpha )
15193 && ( minSrcPosition == rhs.minSrcPosition )
15194 && ( maxSrcPosition == rhs.maxSrcPosition )
15195 && ( minSrcExtent == rhs.minSrcExtent )
15196 && ( maxSrcExtent == rhs.maxSrcExtent )
15197 && ( minDstPosition == rhs.minDstPosition )
15198 && ( maxDstPosition == rhs.maxDstPosition )
15199 && ( minDstExtent == rhs.minDstExtent )
15200 && ( maxDstExtent == rhs.maxDstExtent );
15201 }
15202
15203 bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
15204 {
15205 return !operator==( rhs );
15206 }
15207
15208 DisplayPlaneAlphaFlagsKHR supportedAlpha;
15209 Offset2D minSrcPosition;
15210 Offset2D maxSrcPosition;
15211 Extent2D minSrcExtent;
15212 Extent2D maxSrcExtent;
15213 Offset2D minDstPosition;
15214 Offset2D maxDstPosition;
15215 Extent2D minDstExtent;
15216 Extent2D maxDstExtent;
15217 };
15218 static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
15219
15220 enum class CompositeAlphaFlagBitsKHR
15221 {
15222 eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
15223 ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
15224 ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
15225 eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
15226 };
15227
15228 using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
15229
15230 VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
15231 {
15232 return CompositeAlphaFlagsKHR( bit0 ) | bit1;
15233 }
15234
15235 VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
15236 {
15237 return ~( CompositeAlphaFlagsKHR( bits ) );
15238 }
15239
15240 template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
15241 {
15242 enum
15243 {
15244 allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
15245 };
15246 };
15247
15248 enum class SurfaceTransformFlagBitsKHR
15249 {
15250 eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
15251 eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
15252 eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
15253 eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
15254 eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
15255 eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
15256 eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
15257 eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
15258 eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
15259 };
15260
15261 using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
15262
15263 VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
15264 {
15265 return SurfaceTransformFlagsKHR( bit0 ) | bit1;
15266 }
15267
15268 VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
15269 {
15270 return ~( SurfaceTransformFlagsKHR( bits ) );
15271 }
15272
15273 template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
15274 {
15275 enum
15276 {
15277 allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
15278 };
15279 };
15280
15281 struct DisplayPropertiesKHR
15282 {
15283 operator const VkDisplayPropertiesKHR&() const
15284 {
15285 return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
15286 }
15287
15288 bool operator==( DisplayPropertiesKHR const& rhs ) const
15289 {
15290 return ( display == rhs.display )
15291 && ( displayName == rhs.displayName )
15292 && ( physicalDimensions == rhs.physicalDimensions )
15293 && ( physicalResolution == rhs.physicalResolution )
15294 && ( supportedTransforms == rhs.supportedTransforms )
15295 && ( planeReorderPossible == rhs.planeReorderPossible )
15296 && ( persistentContent == rhs.persistentContent );
15297 }
15298
15299 bool operator!=( DisplayPropertiesKHR const& rhs ) const
15300 {
15301 return !operator==( rhs );
15302 }
15303
15304 DisplayKHR display;
15305 const char* displayName;
15306 Extent2D physicalDimensions;
15307 Extent2D physicalResolution;
15308 SurfaceTransformFlagsKHR supportedTransforms;
15309 Bool32 planeReorderPossible;
15310 Bool32 persistentContent;
15311 };
15312 static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
15313
15314 struct DisplaySurfaceCreateInfoKHR
15315 {
15316 DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
15317 : sType( StructureType::eDisplaySurfaceCreateInfoKHR )
15318 , pNext( nullptr )
15319 , flags( flags_ )
15320 , displayMode( displayMode_ )
15321 , planeIndex( planeIndex_ )
15322 , planeStackIndex( planeStackIndex_ )
15323 , transform( transform_ )
15324 , globalAlpha( globalAlpha_ )
15325 , alphaMode( alphaMode_ )
15326 , imageExtent( imageExtent_ )
15327 {
15328 }
15329
15330 DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
15331 {
15332 memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
15333 }
15334
15335 DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
15336 {
15337 memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
15338 return *this;
15339 }
15340
15341 DisplaySurfaceCreateInfoKHR& setSType( StructureType sType_ )
15342 {
15343 sType = sType_;
15344 return *this;
15345 }
15346
15347 DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
15348 {
15349 pNext = pNext_;
15350 return *this;
15351 }
15352
15353 DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
15354 {
15355 flags = flags_;
15356 return *this;
15357 }
15358
15359 DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
15360 {
15361 displayMode = displayMode_;
15362 return *this;
15363 }
15364
15365 DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
15366 {
15367 planeIndex = planeIndex_;
15368 return *this;
15369 }
15370
15371 DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
15372 {
15373 planeStackIndex = planeStackIndex_;
15374 return *this;
15375 }
15376
15377 DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
15378 {
15379 transform = transform_;
15380 return *this;
15381 }
15382
15383 DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
15384 {
15385 globalAlpha = globalAlpha_;
15386 return *this;
15387 }
15388
15389 DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
15390 {
15391 alphaMode = alphaMode_;
15392 return *this;
15393 }
15394
15395 DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
15396 {
15397 imageExtent = imageExtent_;
15398 return *this;
15399 }
15400
15401 operator const VkDisplaySurfaceCreateInfoKHR&() const
15402 {
15403 return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
15404 }
15405
15406 bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
15407 {
15408 return ( sType == rhs.sType )
15409 && ( pNext == rhs.pNext )
15410 && ( flags == rhs.flags )
15411 && ( displayMode == rhs.displayMode )
15412 && ( planeIndex == rhs.planeIndex )
15413 && ( planeStackIndex == rhs.planeStackIndex )
15414 && ( transform == rhs.transform )
15415 && ( globalAlpha == rhs.globalAlpha )
15416 && ( alphaMode == rhs.alphaMode )
15417 && ( imageExtent == rhs.imageExtent );
15418 }
15419
15420 bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
15421 {
15422 return !operator==( rhs );
15423 }
15424
15425 private:
15426 StructureType sType;
15427
15428 public:
15429 const void* pNext;
15430 DisplaySurfaceCreateFlagsKHR flags;
15431 DisplayModeKHR displayMode;
15432 uint32_t planeIndex;
15433 uint32_t planeStackIndex;
15434 SurfaceTransformFlagBitsKHR transform;
15435 float globalAlpha;
15436 DisplayPlaneAlphaFlagBitsKHR alphaMode;
15437 Extent2D imageExtent;
15438 };
15439 static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
15440
15441 struct SurfaceCapabilitiesKHR
15442 {
15443 operator const VkSurfaceCapabilitiesKHR&() const
15444 {
15445 return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
15446 }
15447
15448 bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
15449 {
15450 return ( minImageCount == rhs.minImageCount )
15451 && ( maxImageCount == rhs.maxImageCount )
15452 && ( currentExtent == rhs.currentExtent )
15453 && ( minImageExtent == rhs.minImageExtent )
15454 && ( maxImageExtent == rhs.maxImageExtent )
15455 && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
15456 && ( supportedTransforms == rhs.supportedTransforms )
15457 && ( currentTransform == rhs.currentTransform )
15458 && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
15459 && ( supportedUsageFlags == rhs.supportedUsageFlags );
15460 }
15461
15462 bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
15463 {
15464 return !operator==( rhs );
15465 }
15466
15467 uint32_t minImageCount;
15468 uint32_t maxImageCount;
15469 Extent2D currentExtent;
15470 Extent2D minImageExtent;
15471 Extent2D maxImageExtent;
15472 uint32_t maxImageArrayLayers;
15473 SurfaceTransformFlagsKHR supportedTransforms;
15474 SurfaceTransformFlagBitsKHR currentTransform;
15475 CompositeAlphaFlagsKHR supportedCompositeAlpha;
15476 ImageUsageFlags supportedUsageFlags;
15477 };
15478 static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
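  // Illustrative usage sketch (not part of the generated registry output):
  // clamping the requested swapchain image count to the advertised range
  // (maxImageCount == 0 means "no upper limit"). 'caps' would come from
  // PhysicalDevice::getSurfaceCapabilitiesKHR(), declared later in this header.
  //
  //   uint32_t imageCount = caps.minImageCount + 1;
  //   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )
  //   {
  //     imageCount = caps.maxImageCount;
  //   }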
15479
15480 struct SwapchainCreateInfoKHR
15481 {
15482 SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
15483 : sType( StructureType::eSwapchainCreateInfoKHR )
15484 , pNext( nullptr )
15485 , flags( flags_ )
15486 , surface( surface_ )
15487 , minImageCount( minImageCount_ )
15488 , imageFormat( imageFormat_ )
15489 , imageColorSpace( imageColorSpace_ )
15490 , imageExtent( imageExtent_ )
15491 , imageArrayLayers( imageArrayLayers_ )
15492 , imageUsage( imageUsage_ )
15493 , imageSharingMode( imageSharingMode_ )
15494 , queueFamilyIndexCount( queueFamilyIndexCount_ )
15495 , pQueueFamilyIndices( pQueueFamilyIndices_ )
15496 , preTransform( preTransform_ )
15497 , compositeAlpha( compositeAlpha_ )
15498 , presentMode( presentMode_ )
15499 , clipped( clipped_ )
15500 , oldSwapchain( oldSwapchain_ )
15501 {
15502 }
15503
15504 SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
15505 {
15506 memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
15507 }
15508
15509 SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
15510 {
15511 memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
15512 return *this;
15513 }
15514
15515 SwapchainCreateInfoKHR& setSType( StructureType sType_ )
15516 {
15517 sType = sType_;
15518 return *this;
15519 }
15520
15521 SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
15522 {
15523 pNext = pNext_;
15524 return *this;
15525 }
15526
15527 SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
15528 {
15529 flags = flags_;
15530 return *this;
15531 }
15532
15533 SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
15534 {
15535 surface = surface_;
15536 return *this;
15537 }
15538
15539 SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
15540 {
15541 minImageCount = minImageCount_;
15542 return *this;
15543 }
15544
15545 SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
15546 {
15547 imageFormat = imageFormat_;
15548 return *this;
15549 }
15550
15551 SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
15552 {
15553 imageColorSpace = imageColorSpace_;
15554 return *this;
15555 }
15556
15557 SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
15558 {
15559 imageExtent = imageExtent_;
15560 return *this;
15561 }
15562
15563 SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
15564 {
15565 imageArrayLayers = imageArrayLayers_;
15566 return *this;
15567 }
15568
15569 SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
15570 {
15571 imageUsage = imageUsage_;
15572 return *this;
15573 }
15574
15575 SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
15576 {
15577 imageSharingMode = imageSharingMode_;
15578 return *this;
15579 }
15580
15581 SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
15582 {
15583 queueFamilyIndexCount = queueFamilyIndexCount_;
15584 return *this;
15585 }
15586
15587 SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
15588 {
15589 pQueueFamilyIndices = pQueueFamilyIndices_;
15590 return *this;
15591 }
15592
15593 SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
15594 {
15595 preTransform = preTransform_;
15596 return *this;
15597 }
15598
15599 SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
15600 {
15601 compositeAlpha = compositeAlpha_;
15602 return *this;
15603 }
15604
15605 SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
15606 {
15607 presentMode = presentMode_;
15608 return *this;
15609 }
15610
15611 SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
15612 {
15613 clipped = clipped_;
15614 return *this;
15615 }
15616
15617 SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
15618 {
15619 oldSwapchain = oldSwapchain_;
15620 return *this;
15621 }
15622
15623 operator const VkSwapchainCreateInfoKHR&() const
15624 {
15625 return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
15626 }
15627
15628 bool operator==( SwapchainCreateInfoKHR const& rhs ) const
15629 {
15630 return ( sType == rhs.sType )
15631 && ( pNext == rhs.pNext )
15632 && ( flags == rhs.flags )
15633 && ( surface == rhs.surface )
15634 && ( minImageCount == rhs.minImageCount )
15635 && ( imageFormat == rhs.imageFormat )
15636 && ( imageColorSpace == rhs.imageColorSpace )
15637 && ( imageExtent == rhs.imageExtent )
15638 && ( imageArrayLayers == rhs.imageArrayLayers )
15639 && ( imageUsage == rhs.imageUsage )
15640 && ( imageSharingMode == rhs.imageSharingMode )
15641 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
15642 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
15643 && ( preTransform == rhs.preTransform )
15644 && ( compositeAlpha == rhs.compositeAlpha )
15645 && ( presentMode == rhs.presentMode )
15646 && ( clipped == rhs.clipped )
15647 && ( oldSwapchain == rhs.oldSwapchain );
15648 }
15649
15650 bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
15651 {
15652 return !operator==( rhs );
15653 }
15654
15655 private:
15656 StructureType sType;
15657
15658 public:
15659 const void* pNext;
15660 SwapchainCreateFlagsKHR flags;
15661 SurfaceKHR surface;
15662 uint32_t minImageCount;
15663 Format imageFormat;
15664 ColorSpaceKHR imageColorSpace;
15665 Extent2D imageExtent;
15666 uint32_t imageArrayLayers;
15667 ImageUsageFlags imageUsage;
15668 SharingMode imageSharingMode;
15669 uint32_t queueFamilyIndexCount;
15670 const uint32_t* pQueueFamilyIndices;
15671 SurfaceTransformFlagBitsKHR preTransform;
15672 CompositeAlphaFlagBitsKHR compositeAlpha;
15673 PresentModeKHR presentMode;
15674 Bool32 clipped;
15675 SwapchainKHR oldSwapchain;
15676 };
15677 static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
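  // Illustrative usage sketch (not part of the generated registry output):
  // filling the create info from the surface queries sketched above and
  // creating the swapchain. 'surface', 'caps', 'imageCount', 'chosenFormat',
  // 'chosenMode' and 'device' are assumptions carried over from those sketches.
  //
  //   vk::SwapchainCreateInfoKHR swapchainInfo = vk::SwapchainCreateInfoKHR()
  //     .setSurface( surface )
  //     .setMinImageCount( imageCount )
  //     .setImageFormat( chosenFormat.format )
  //     .setImageColorSpace( chosenFormat.colorSpace )
  //     .setImageExtent( caps.currentExtent )
  //     .setImageArrayLayers( 1 )
  //     .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
  //     .setImageSharingMode( vk::SharingMode::eExclusive )
  //     .setPreTransform( caps.currentTransform )
  //     .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
  //     .setPresentMode( chosenMode )
  //     .setClipped( VK_TRUE );
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );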
15678
15679 enum class DebugReportFlagBitsEXT
15680 {
15681 eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
15682 eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
15683 ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
15684 eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
15685 eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
15686 };
15687
15688 using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
15689
15690 VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
15691 {
15692 return DebugReportFlagsEXT( bit0 ) | bit1;
15693 }
15694
15695 VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
15696 {
15697 return ~( DebugReportFlagsEXT( bits ) );
15698 }
15699
15700 template <> struct FlagTraits<DebugReportFlagBitsEXT>
15701 {
15702 enum
15703 {
15704 allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
15705 };
15706 };
15707
15708 struct DebugReportCallbackCreateInfoEXT
15709 {
15710 DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
15711 : sType( StructureType::eDebugReportCallbackCreateInfoEXT )
15712 , pNext( nullptr )
15713 , flags( flags_ )
15714 , pfnCallback( pfnCallback_ )
15715 , pUserData( pUserData_ )
15716 {
15717 }
15718
15719 DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
15720 {
15721 memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
15722 }
15723
15724 DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
15725 {
15726 memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
15727 return *this;
15728 }
15729
15730 DebugReportCallbackCreateInfoEXT& setSType( StructureType sType_ )
15731 {
15732 sType = sType_;
15733 return *this;
15734 }
15735
15736 DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
15737 {
15738 pNext = pNext_;
15739 return *this;
15740 }
15741
15742 DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
15743 {
15744 flags = flags_;
15745 return *this;
15746 }
15747
15748 DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
15749 {
15750 pfnCallback = pfnCallback_;
15751 return *this;
15752 }
15753
15754 DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
15755 {
15756 pUserData = pUserData_;
15757 return *this;
15758 }
15759
15760 operator const VkDebugReportCallbackCreateInfoEXT&() const
15761 {
15762 return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
15763 }
15764
15765 bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
15766 {
15767 return ( sType == rhs.sType )
15768 && ( pNext == rhs.pNext )
15769 && ( flags == rhs.flags )
15770 && ( pfnCallback == rhs.pfnCallback )
15771 && ( pUserData == rhs.pUserData );
15772 }
15773
15774 bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
15775 {
15776 return !operator==( rhs );
15777 }
15778
15779 private:
15780 StructureType sType;
15781
15782 public:
15783 const void* pNext;
15784 DebugReportFlagsEXT flags;
15785 PFN_vkDebugReportCallbackEXT pfnCallback;
15786 void* pUserData;
15787 };
15788 static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
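  // Illustrative usage sketch (not part of the generated registry output):
  // registering a callback for errors and warnings. 'myDebugCallback' is an
  // assumed free function with the PFN_vkDebugReportCallbackEXT signature, and
  // VK_EXT_debug_report is assumed to have been enabled on the instance.
  //
  //   vk::DebugReportCallbackCreateInfoEXT callbackInfo = vk::DebugReportCallbackCreateInfoEXT()
  //     .setFlags( vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning )
  //     .setPfnCallback( &myDebugCallback );
  //   vk::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT( callbackInfo );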
15789
15790 enum class DebugReportObjectTypeEXT
15791 {
15792 eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
15793 eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
15794 ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
15795 eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
15796 eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
15797 eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
15798 eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
15799 eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
15800 eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
15801 eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
15802 eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
15803 eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
15804 eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
15805 eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
15806 eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
15807 eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
15808 ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
15809 ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
15810 eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
15811 ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
15812 eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
15813 eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
15814 eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
15815 eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
15816 eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
15817 eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
15818 eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
15819 eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
15820 eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
15821 eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
15822 eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
15823 eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
15824 eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT
15825 };
15826
15827 struct DebugMarkerObjectNameInfoEXT
15828 {
15829 DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
15830 : sType( StructureType::eDebugMarkerObjectNameInfoEXT )
15831 , pNext( nullptr )
15832 , objectType( objectType_ )
15833 , object( object_ )
15834 , pObjectName( pObjectName_ )
15835 {
15836 }
15837
15838 DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
15839 {
15840 memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
15841 }
15842
15843 DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
15844 {
15845 memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
15846 return *this;
15847 }
15848
15849 DebugMarkerObjectNameInfoEXT& setSType( StructureType sType_ )
15850 {
15851 sType = sType_;
15852 return *this;
15853 }
15854
15855 DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
15856 {
15857 pNext = pNext_;
15858 return *this;
15859 }
15860
15861 DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
15862 {
15863 objectType = objectType_;
15864 return *this;
15865 }
15866
15867 DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
15868 {
15869 object = object_;
15870 return *this;
15871 }
15872
15873 DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
15874 {
15875 pObjectName = pObjectName_;
15876 return *this;
15877 }
15878
15879 operator const VkDebugMarkerObjectNameInfoEXT&() const
15880 {
15881 return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
15882 }
15883
15884 bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
15885 {
15886 return ( sType == rhs.sType )
15887 && ( pNext == rhs.pNext )
15888 && ( objectType == rhs.objectType )
15889 && ( object == rhs.object )
15890 && ( pObjectName == rhs.pObjectName );
15891 }
15892
15893 bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
15894 {
15895 return !operator==( rhs );
15896 }
15897
15898 private:
15899 StructureType sType;
15900
15901 public:
15902 const void* pNext;
15903 DebugReportObjectTypeEXT objectType;
15904 uint64_t object;
15905 const char* pObjectName;
15906 };
15907 static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
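  // Usage sketch (illustrative only; the queue handle and name below are hypothetical):
  // DebugMarkerObjectNameInfoEXT is typically filled with the chained setters above and
  // handed to the VK_EXT_debug_marker entry point so a debugger can show a readable name.
  //
  //   vk::Queue queue = ...;  // hypothetical handle obtained from the device
  //   vk::DebugMarkerObjectNameInfoEXT nameInfo = vk::DebugMarkerObjectNameInfoEXT()
  //     .setObjectType( vk::DebugReportObjectTypeEXT::eQueue )
  //     .setObject( reinterpret_cast<uint64_t>( static_cast<VkQueue>( queue ) ) )
  //     .setPObjectName( "main graphics queue" );
  //   device.debugMarkerSetObjectNameEXT( nameInfo );  // assumes VK_EXT_debug_marker is enabled on the device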
15908
15909 struct DebugMarkerObjectTagInfoEXT
15910 {
15911 DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
15912 : sType( StructureType::eDebugMarkerObjectTagInfoEXT )
15913 , pNext( nullptr )
15914 , objectType( objectType_ )
15915 , object( object_ )
15916 , tagName( tagName_ )
15917 , tagSize( tagSize_ )
15918 , pTag( pTag_ )
15919 {
15920 }
15921
15922 DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
15923 {
15924 memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
15925 }
15926
15927 DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
15928 {
15929 memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
15930 return *this;
15931 }
15932
15933 DebugMarkerObjectTagInfoEXT& setSType( StructureType sType_ )
15934 {
15935 sType = sType_;
15936 return *this;
15937 }
15938
15939 DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
15940 {
15941 pNext = pNext_;
15942 return *this;
15943 }
15944
15945 DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
15946 {
15947 objectType = objectType_;
15948 return *this;
15949 }
15950
15951 DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
15952 {
15953 object = object_;
15954 return *this;
15955 }
15956
15957 DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
15958 {
15959 tagName = tagName_;
15960 return *this;
15961 }
15962
15963 DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
15964 {
15965 tagSize = tagSize_;
15966 return *this;
15967 }
15968
15969 DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
15970 {
15971 pTag = pTag_;
15972 return *this;
15973 }
15974
15975 operator const VkDebugMarkerObjectTagInfoEXT&() const
15976 {
15977 return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
15978 }
15979
15980 bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
15981 {
15982 return ( sType == rhs.sType )
15983 && ( pNext == rhs.pNext )
15984 && ( objectType == rhs.objectType )
15985 && ( object == rhs.object )
15986 && ( tagName == rhs.tagName )
15987 && ( tagSize == rhs.tagSize )
15988 && ( pTag == rhs.pTag );
15989 }
15990
15991 bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
15992 {
15993 return !operator==( rhs );
15994 }
15995
15996 private:
15997 StructureType sType;
15998
15999 public:
16000 const void* pNext;
16001 DebugReportObjectTypeEXT objectType;
16002 uint64_t object;
16003 uint64_t tagName;
16004 size_t tagSize;
16005 const void* pTag;
16006 };
16007 static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
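  // Usage sketch (illustrative; the handle, tag name and payload are hypothetical): a tag
  // attaches an arbitrary block of bytes to an object, again through VK_EXT_debug_marker.
  //
  //   uint32_t payload[2]     = { 42u, 7u };  // hypothetical tag data
  //   uint64_t rawImageHandle = 0;            // raw VkImage handle, obtained elsewhere
  //   vk::DebugMarkerObjectTagInfoEXT tagInfo( vk::DebugReportObjectTypeEXT::eImage,
  //                                            rawImageHandle, 0x1000 /*tagName*/,
  //                                            sizeof( payload ), payload );
  //   device.debugMarkerSetObjectTagEXT( tagInfo );  // assumes the extension is enabled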
16008
16009 enum class DebugReportErrorEXT
16010 {
16011 eNone = VK_DEBUG_REPORT_ERROR_NONE_EXT,
16012 eCallbackRef = VK_DEBUG_REPORT_ERROR_CALLBACK_REF_EXT
16013 };
16014
16015 enum class RasterizationOrderAMD
16016 {
16017 eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
16018 eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
16019 };
16020
16021 struct PipelineRasterizationStateRasterizationOrderAMD
16022 {
16023 PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
16024 : sType( StructureType::ePipelineRasterizationStateRasterizationOrderAMD )
16025 , pNext( nullptr )
16026 , rasterizationOrder( rasterizationOrder_ )
16027 {
16028 }
16029
16030 PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
16031 {
16032 memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
16033 }
16034
16035 PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
16036 {
16037 memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
16038 return *this;
16039 }
16040
16041 PipelineRasterizationStateRasterizationOrderAMD& setSType( StructureType sType_ )
16042 {
16043 sType = sType_;
16044 return *this;
16045 }
16046
16047 PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
16048 {
16049 pNext = pNext_;
16050 return *this;
16051 }
16052
16053 PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
16054 {
16055 rasterizationOrder = rasterizationOrder_;
16056 return *this;
16057 }
16058
16059 operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
16060 {
16061 return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
16062 }
16063
16064 bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
16065 {
16066 return ( sType == rhs.sType )
16067 && ( pNext == rhs.pNext )
16068 && ( rasterizationOrder == rhs.rasterizationOrder );
16069 }
16070
16071 bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
16072 {
16073 return !operator==( rhs );
16074 }
16075
16076 private:
16077 StructureType sType;
16078
16079 public:
16080 const void* pNext;
16081 RasterizationOrderAMD rasterizationOrder;
16082 };
16083 static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
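  // Usage sketch (illustrative): the AMD struct is not passed to a command directly but is
  // chained into PipelineRasterizationStateCreateInfo via pNext, assuming the
  // VK_AMD_rasterization_order device extension is enabled.
  //
  //   vk::PipelineRasterizationStateRasterizationOrderAMD rasterOrder( vk::RasterizationOrderAMD::eRelaxed );
  //   vk::PipelineRasterizationStateCreateInfo rasterState;  // remaining members filled as usual
  //   rasterState.setPNext( &rasterOrder );                   // extends the rasterization state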
16084
16085 enum class ExternalMemoryHandleTypeFlagBitsNV
16086 {
16087 eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
16088 eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
16089 eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
16090 eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
16091 };
16092
16093 using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
16094
16095 VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
16096 {
16097 return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
16098 }
16099
16100 VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits )
16101 {
16102 return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
16103 }
16104
16105 template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
16106 {
16107 enum
16108 {
16109 allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
16110 };
16111 };
16112
16113 struct ExternalMemoryImageCreateInfoNV
16114 {
16115 ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
16116 : sType( StructureType::eExternalMemoryImageCreateInfoNV )
16117 , pNext( nullptr )
16118 , handleTypes( handleTypes_ )
16119 {
16120 }
16121
16122 ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
16123 {
16124 memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
16125 }
16126
16127 ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
16128 {
16129 memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
16130 return *this;
16131 }
16132
16133 ExternalMemoryImageCreateInfoNV& setSType( StructureType sType_ )
16134 {
16135 sType = sType_;
16136 return *this;
16137 }
16138
16139 ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
16140 {
16141 pNext = pNext_;
16142 return *this;
16143 }
16144
16145 ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
16146 {
16147 handleTypes = handleTypes_;
16148 return *this;
16149 }
16150
16151 operator const VkExternalMemoryImageCreateInfoNV&() const
16152 {
16153 return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
16154 }
16155
16156 bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
16157 {
16158 return ( sType == rhs.sType )
16159 && ( pNext == rhs.pNext )
16160 && ( handleTypes == rhs.handleTypes );
16161 }
16162
16163 bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
16164 {
16165 return !operator==( rhs );
16166 }
16167
16168 private:
16169 StructureType sType;
16170
16171 public:
16172 const void* pNext;
16173 ExternalMemoryHandleTypeFlagsNV handleTypes;
16174 };
16175 static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
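  // Usage sketch (illustrative): the flag bits above combine through the overloaded
  // operator| into ExternalMemoryHandleTypeFlagsNV, and the struct is chained into
  // ImageCreateInfo, assuming VK_NV_external_memory is enabled.
  //
  //   vk::ExternalMemoryImageCreateInfoNV externalInfo(
  //     vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 |
  //     vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt );
  //   vk::ImageCreateInfo imageInfo;        // format, extent, usage, ... filled as usual
  //   imageInfo.setPNext( &externalInfo );  // mark the image as externally shareable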
16176
16177 struct ExportMemoryAllocateInfoNV
16178 {
16179 ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
16180 : sType( StructureType::eExportMemoryAllocateInfoNV )
16181 , pNext( nullptr )
16182 , handleTypes( handleTypes_ )
16183 {
16184 }
16185
16186 ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
16187 {
16188 memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
16189 }
16190
16191 ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
16192 {
16193 memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
16194 return *this;
16195 }
16196
16197 ExportMemoryAllocateInfoNV& setSType( StructureType sType_ )
16198 {
16199 sType = sType_;
16200 return *this;
16201 }
16202
16203 ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
16204 {
16205 pNext = pNext_;
16206 return *this;
16207 }
16208
16209 ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
16210 {
16211 handleTypes = handleTypes_;
16212 return *this;
16213 }
16214
16215 operator const VkExportMemoryAllocateInfoNV&() const
16216 {
16217 return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
16218 }
16219
16220 bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
16221 {
16222 return ( sType == rhs.sType )
16223 && ( pNext == rhs.pNext )
16224 && ( handleTypes == rhs.handleTypes );
16225 }
16226
16227 bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
16228 {
16229 return !operator==( rhs );
16230 }
16231
16232 private:
16233 StructureType sType;
16234
16235 public:
16236 const void* pNext;
16237 ExternalMemoryHandleTypeFlagsNV handleTypes;
16238 };
16239 static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
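  // Usage sketch (illustrative; the size and memory type index are hypothetical): the
  // export info is chained into MemoryAllocateInfo so the resulting allocation can be
  // exported, assuming VK_NV_external_memory is enabled.
  //
  //   vk::ExportMemoryAllocateInfoNV exportInfo( vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );
  //   vk::MemoryAllocateInfo allocInfo( 65536 /*allocationSize*/, 0 /*memoryTypeIndex*/ );
  //   allocInfo.setPNext( &exportInfo );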
16240
16241#ifdef VK_USE_PLATFORM_WIN32_KHR
16242 struct ImportMemoryWin32HandleInfoNV
16243 {
16244 ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
16245 : sType( StructureType::eImportMemoryWin32HandleInfoNV )
16246 , pNext( nullptr )
16247 , handleType( handleType_ )
16248 , handle( handle_ )
16249 {
16250 }
16251
16252 ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
16253 {
16254 memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
16255 }
16256
16257 ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
16258 {
16259 memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
16260 return *this;
16261 }
16262
16263 ImportMemoryWin32HandleInfoNV& setSType( StructureType sType_ )
16264 {
16265 sType = sType_;
16266 return *this;
16267 }
16268
16269 ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
16270 {
16271 pNext = pNext_;
16272 return *this;
16273 }
16274
16275 ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
16276 {
16277 handleType = handleType_;
16278 return *this;
16279 }
16280
16281 ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
16282 {
16283 handle = handle_;
16284 return *this;
16285 }
16286
16287 operator const VkImportMemoryWin32HandleInfoNV&() const
16288 {
16289 return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
16290 }
16291
16292 bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
16293 {
16294 return ( sType == rhs.sType )
16295 && ( pNext == rhs.pNext )
16296 && ( handleType == rhs.handleType )
16297 && ( handle == rhs.handle );
16298 }
16299
16300 bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
16301 {
16302 return !operator==( rhs );
16303 }
16304
16305 private:
16306 StructureType sType;
16307
16308 public:
16309 const void* pNext;
16310 ExternalMemoryHandleTypeFlagsNV handleType;
16311 HANDLE handle;
16312 };
16313 static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
16314#endif /*VK_USE_PLATFORM_WIN32_KHR*/
16315
16316 enum class ExternalMemoryFeatureFlagBitsNV
16317 {
16318 eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
16319 eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
16320 eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
16321 };
16322
16323 using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
16324
16325 VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
16326 {
16327 return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
16328 }
16329
16330 VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
16331 {
16332 return ~( ExternalMemoryFeatureFlagsNV( bits ) );
16333 }
16334
16335 template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
16336 {
16337 enum
16338 {
16339 allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
16340 };
16341 };
16342
16343 struct ExternalImageFormatPropertiesNV
16344 {
16345 operator const VkExternalImageFormatPropertiesNV&() const
16346 {
16347 return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
16348 }
16349
16350 bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
16351 {
16352 return ( imageFormatProperties == rhs.imageFormatProperties )
16353 && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
16354 && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
16355 && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
16356 }
16357
16358 bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
16359 {
16360 return !operator==( rhs );
16361 }
16362
16363 ImageFormatProperties imageFormatProperties;
16364 ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
16365 ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
16366 ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
16367 };
16368 static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
16369
16370 enum class ValidationCheckEXT
16371 {
16372 eAll = VK_VALIDATION_CHECK_ALL_EXT
16373 };
16374
16375 struct ValidationFlagsEXT
16376 {
16377 ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
16378 : sType( StructureType::eValidationFlagsEXT )
16379 , pNext( nullptr )
16380 , disabledValidationCheckCount( disabledValidationCheckCount_ )
16381 , pDisabledValidationChecks( pDisabledValidationChecks_ )
16382 {
16383 }
16384
16385 ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
16386 {
16387 memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
16388 }
16389
16390 ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
16391 {
16392 memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
16393 return *this;
16394 }
16395
16396 ValidationFlagsEXT& setSType( StructureType sType_ )
16397 {
16398 sType = sType_;
16399 return *this;
16400 }
16401
16402 ValidationFlagsEXT& setPNext( const void* pNext_ )
16403 {
16404 pNext = pNext_;
16405 return *this;
16406 }
16407
16408 ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
16409 {
16410 disabledValidationCheckCount = disabledValidationCheckCount_;
16411 return *this;
16412 }
16413
16414 ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
16415 {
16416 pDisabledValidationChecks = pDisabledValidationChecks_;
16417 return *this;
16418 }
16419
16420 operator const VkValidationFlagsEXT&() const
16421 {
16422 return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
16423 }
16424
16425 bool operator==( ValidationFlagsEXT const& rhs ) const
16426 {
16427 return ( sType == rhs.sType )
16428 && ( pNext == rhs.pNext )
16429 && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
16430 && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
16431 }
16432
16433 bool operator!=( ValidationFlagsEXT const& rhs ) const
16434 {
16435 return !operator==( rhs );
16436 }
16437
16438 private:
16439 StructureType sType;
16440
16441 public:
16442 const void* pNext;
16443 uint32_t disabledValidationCheckCount;
16444 ValidationCheckEXT* pDisabledValidationChecks;
16445 };
16446 static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
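  // Usage sketch (illustrative): VK_EXT_validation_flags lets an application disable
  // validation checks by chaining ValidationFlagsEXT into InstanceCreateInfo.
  //
  //   vk::ValidationCheckEXT disabledChecks[] = { vk::ValidationCheckEXT::eAll };
  //   vk::ValidationFlagsEXT validationFlags( 1, disabledChecks );
  //   vk::InstanceCreateInfo instanceInfo;       // application / layer / extension info as usual
  //   instanceInfo.setPNext( &validationFlags );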
16447
16448 enum class IndirectCommandsLayoutUsageFlagBitsNVX
16449 {
16450 eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
16451 eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
16452 eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
16453 eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
16454 };
16455
16456 using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
16457
16458 VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 )
16459 {
16460 return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
16461 }
16462
16463 VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits )
16464 {
16465 return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
16466 }
16467
16468 template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
16469 {
16470 enum
16471 {
16472 allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
16473 };
16474 };
16475
16476 enum class ObjectEntryUsageFlagBitsNVX
16477 {
16478 eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
16479 eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
16480 };
16481
16482 using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
16483
16484 VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 )
16485 {
16486 return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
16487 }
16488
16489 VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits )
16490 {
16491 return ~( ObjectEntryUsageFlagsNVX( bits ) );
16492 }
16493
16494 template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
16495 {
16496 enum
16497 {
16498 allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
16499 };
16500 };
16501
16502 enum class IndirectCommandsTokenTypeNVX
16503 {
16504 eVkIndirectCommandsTokenPipeline = VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX,
16505 eVkIndirectCommandsTokenDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_DESCRIPTOR_SET_NVX,
16506 eVkIndirectCommandsTokenIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_INDEX_BUFFER_NVX,
16507 eVkIndirectCommandsTokenVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_VERTEX_BUFFER_NVX,
16508 eVkIndirectCommandsTokenPushConstant = VK_INDIRECT_COMMANDS_TOKEN_PUSH_CONSTANT_NVX,
16509 eVkIndirectCommandsTokenDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_DRAW_INDEXED_NVX,
16510 eVkIndirectCommandsTokenDraw = VK_INDIRECT_COMMANDS_TOKEN_DRAW_NVX,
16511 eVkIndirectCommandsTokenDispatch = VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX
16512 };
16513
16514 struct IndirectCommandsTokenNVX
16515 {
16516 IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0 )
16517 : tokenType( tokenType_ )
16518 , buffer( buffer_ )
16519 , offset( offset_ )
16520 {
16521 }
16522
16523 IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs )
16524 {
16525 memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
16526 }
16527
16528 IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs )
16529 {
16530 memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
16531 return *this;
16532 }
16533
16534 IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
16535 {
16536 tokenType = tokenType_;
16537 return *this;
16538 }
16539
16540 IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ )
16541 {
16542 buffer = buffer_;
16543 return *this;
16544 }
16545
16546 IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ )
16547 {
16548 offset = offset_;
16549 return *this;
16550 }
16551
16552 operator const VkIndirectCommandsTokenNVX&() const
16553 {
16554 return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>(this);
16555 }
16556
16557 bool operator==( IndirectCommandsTokenNVX const& rhs ) const
16558 {
16559 return ( tokenType == rhs.tokenType )
16560 && ( buffer == rhs.buffer )
16561 && ( offset == rhs.offset );
16562 }
16563
16564 bool operator!=( IndirectCommandsTokenNVX const& rhs ) const
16565 {
16566 return !operator==( rhs );
16567 }
16568
16569 IndirectCommandsTokenTypeNVX tokenType;
16570 Buffer buffer;
16571 DeviceSize offset;
16572 };
16573 static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
16574
16575 struct IndirectCommandsLayoutTokenNVX
16576 {
16577 IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, uint32_t bindingUnit_ = 0, uint32_t dynamicCount_ = 0, uint32_t divisor_ = 0 )
16578 : tokenType( tokenType_ )
16579 , bindingUnit( bindingUnit_ )
16580 , dynamicCount( dynamicCount_ )
16581 , divisor( divisor_ )
16582 {
16583 }
16584
16585 IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs )
16586 {
16587 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
16588 }
16589
16590 IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs )
16591 {
16592 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
16593 return *this;
16594 }
16595
16596 IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
16597 {
16598 tokenType = tokenType_;
16599 return *this;
16600 }
16601
16602 IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ )
16603 {
16604 bindingUnit = bindingUnit_;
16605 return *this;
16606 }
16607
16608 IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ )
16609 {
16610 dynamicCount = dynamicCount_;
16611 return *this;
16612 }
16613
16614 IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ )
16615 {
16616 divisor = divisor_;
16617 return *this;
16618 }
16619
16620 operator const VkIndirectCommandsLayoutTokenNVX&() const
16621 {
16622 return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>(this);
16623 }
16624
16625 bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const
16626 {
16627 return ( tokenType == rhs.tokenType )
16628 && ( bindingUnit == rhs.bindingUnit )
16629 && ( dynamicCount == rhs.dynamicCount )
16630 && ( divisor == rhs.divisor );
16631 }
16632
16633 bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const
16634 {
16635 return !operator==( rhs );
16636 }
16637
16638 IndirectCommandsTokenTypeNVX tokenType;
16639 uint32_t bindingUnit;
16640 uint32_t dynamicCount;
16641 uint32_t divisor;
16642 };
16643 static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
16644
16645 struct IndirectCommandsLayoutCreateInfoNVX
16646 {
16647 IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), uint32_t tokenCount_ = 0, const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr )
16648 : sType( StructureType::eIndirectCommandsLayoutCreateInfoNVX )
16649 , pNext( nullptr )
16650 , pipelineBindPoint( pipelineBindPoint_ )
16651 , flags( flags_ )
16652 , tokenCount( tokenCount_ )
16653 , pTokens( pTokens_ )
16654 {
16655 }
16656
16657 IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
16658 {
16659 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
16660 }
16661
16662 IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
16663 {
16664 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
16665 return *this;
16666 }
16667
16668 IndirectCommandsLayoutCreateInfoNVX& setSType( StructureType sType_ )
16669 {
16670 sType = sType_;
16671 return *this;
16672 }
16673
16674 IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ )
16675 {
16676 pNext = pNext_;
16677 return *this;
16678 }
16679
16680 IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
16681 {
16682 pipelineBindPoint = pipelineBindPoint_;
16683 return *this;
16684 }
16685
16686 IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ )
16687 {
16688 flags = flags_;
16689 return *this;
16690 }
16691
16692 IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ )
16693 {
16694 tokenCount = tokenCount_;
16695 return *this;
16696 }
16697
16698 IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ )
16699 {
16700 pTokens = pTokens_;
16701 return *this;
16702 }
16703
16704 operator const VkIndirectCommandsLayoutCreateInfoNVX&() const
16705 {
16706 return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>(this);
16707 }
16708
16709 bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
16710 {
16711 return ( sType == rhs.sType )
16712 && ( pNext == rhs.pNext )
16713 && ( pipelineBindPoint == rhs.pipelineBindPoint )
16714 && ( flags == rhs.flags )
16715 && ( tokenCount == rhs.tokenCount )
16716 && ( pTokens == rhs.pTokens );
16717 }
16718
16719 bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
16720 {
16721 return !operator==( rhs );
16722 }
16723
16724 private:
16725 StructureType sType;
16726
16727 public:
16728 const void* pNext;
16729 PipelineBindPoint pipelineBindPoint;
16730 IndirectCommandsLayoutUsageFlagsNVX flags;
16731 uint32_t tokenCount;
16732 const IndirectCommandsLayoutTokenNVX* pTokens;
16733 };
16734 static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
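  // Usage sketch (illustrative; the token choice and counts are hypothetical): a layout is
  // described by an ordered array of tokens and then created through the
  // VK_NVX_device_generated_commands entry point.
  //
  //   vk::IndirectCommandsLayoutTokenNVX tokens[] = {
  //     vk::IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline ),
  //     vk::IndirectCommandsLayoutTokenNVX( vk::IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDrawIndexed )
  //   };
  //   vk::IndirectCommandsLayoutCreateInfoNVX layoutInfo(
  //     vk::PipelineBindPoint::eGraphics,
  //     vk::IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences,
  //     2, tokens );
  //   // device.createIndirectCommandsLayoutNVX( layoutInfo );  // assumes the NVX extension wrapper is present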
16735
16736 enum class ObjectEntryTypeNVX
16737 {
16738 eVkObjectEntryDescriptorSet = VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX,
16739 eVkObjectEntryPipeline = VK_OBJECT_ENTRY_PIPELINE_NVX,
16740 eVkObjectEntryIndexBuffer = VK_OBJECT_ENTRY_INDEX_BUFFER_NVX,
16741 eVkObjectEntryVertexBuffer = VK_OBJECT_ENTRY_VERTEX_BUFFER_NVX,
16742 eVkObjectEntryPushConstant = VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX
16743 };
16744
16745 struct ObjectTableCreateInfoNVX
16746 {
16747 ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, const uint32_t* pObjectEntryCounts_ = nullptr, const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, uint32_t maxUniformBuffersPerDescriptor_ = 0, uint32_t maxStorageBuffersPerDescriptor_ = 0, uint32_t maxStorageImagesPerDescriptor_ = 0, uint32_t maxSampledImagesPerDescriptor_ = 0, uint32_t maxPipelineLayouts_ = 0 )
16748 : sType( StructureType::eObjectTableCreateInfoNVX )
16749 , pNext( nullptr )
16750 , objectCount( objectCount_ )
16751 , pObjectEntryTypes( pObjectEntryTypes_ )
16752 , pObjectEntryCounts( pObjectEntryCounts_ )
16753 , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
16754 , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
16755 , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
16756 , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
16757 , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
16758 , maxPipelineLayouts( maxPipelineLayouts_ )
16759 {
16760 }
16761
16762 ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs )
16763 {
16764 memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
16765 }
16766
16767 ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs )
16768 {
16769 memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
16770 return *this;
16771 }
16772
16773 ObjectTableCreateInfoNVX& setSType( StructureType sType_ )
16774 {
16775 sType = sType_;
16776 return *this;
16777 }
16778
16779 ObjectTableCreateInfoNVX& setPNext( const void* pNext_ )
16780 {
16781 pNext = pNext_;
16782 return *this;
16783 }
16784
16785 ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ )
16786 {
16787 objectCount = objectCount_;
16788 return *this;
16789 }
16790
16791 ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ )
16792 {
16793 pObjectEntryTypes = pObjectEntryTypes_;
16794 return *this;
16795 }
16796
16797 ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ )
16798 {
16799 pObjectEntryCounts = pObjectEntryCounts_;
16800 return *this;
16801 }
16802
16803 ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ )
16804 {
16805 pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
16806 return *this;
16807 }
16808
16809 ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ )
16810 {
16811 maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
16812 return *this;
16813 }
16814
16815 ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ )
16816 {
16817 maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
16818 return *this;
16819 }
16820
16821 ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ )
16822 {
16823 maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
16824 return *this;
16825 }
16826
16827 ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ )
16828 {
16829 maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
16830 return *this;
16831 }
16832
16833 ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ )
16834 {
16835 maxPipelineLayouts = maxPipelineLayouts_;
16836 return *this;
16837 }
16838
16839 operator const VkObjectTableCreateInfoNVX&() const
16840 {
16841 return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>(this);
16842 }
16843
16844 bool operator==( ObjectTableCreateInfoNVX const& rhs ) const
16845 {
16846 return ( sType == rhs.sType )
16847 && ( pNext == rhs.pNext )
16848 && ( objectCount == rhs.objectCount )
16849 && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
16850 && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
16851 && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
16852 && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
16853 && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
16854 && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
16855 && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
16856 && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
16857 }
16858
16859 bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const
16860 {
16861 return !operator==( rhs );
16862 }
16863
16864 private:
16865 StructureType sType;
16866
16867 public:
16868 const void* pNext;
16869 uint32_t objectCount;
16870 const ObjectEntryTypeNVX* pObjectEntryTypes;
16871 const uint32_t* pObjectEntryCounts;
16872 const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
16873 uint32_t maxUniformBuffersPerDescriptor;
16874 uint32_t maxStorageBuffersPerDescriptor;
16875 uint32_t maxStorageImagesPerDescriptor;
16876 uint32_t maxSampledImagesPerDescriptor;
16877 uint32_t maxPipelineLayouts;
16878 };
16879 static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
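  // Usage sketch (illustrative; every count below is hypothetical): the parallel arrays
  // describe how many entries of each type the object table reserves.
  //
  //   vk::ObjectEntryTypeNVX entryTypes[]       = { vk::ObjectEntryTypeNVX::eVkObjectEntryPipeline,
  //                                                 vk::ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet };
  //   uint32_t entryCounts[]                    = { 16, 64 };
  //   vk::ObjectEntryUsageFlagsNVX entryUsage[] = { vk::ObjectEntryUsageFlagBitsNVX::eGraphics,
  //                                                 vk::ObjectEntryUsageFlagBitsNVX::eGraphics };
  //   vk::ObjectTableCreateInfoNVX tableInfo( 2, entryTypes, entryCounts, entryUsage,
  //                                           8 /*maxUniformBuffersPerDescriptor*/,
  //                                           8 /*maxStorageBuffersPerDescriptor*/,
  //                                           8 /*maxStorageImagesPerDescriptor*/,
  //                                           8 /*maxSampledImagesPerDescriptor*/,
  //                                           4 /*maxPipelineLayouts*/ );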
16880
16881 struct ObjectTableEntryNVX
16882 {
16883 ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() )
16884 : type( type_ )
16885 , flags( flags_ )
16886 {
16887 }
16888
16889 ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs )
16890 {
16891 memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
16892 }
16893
16894 ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs )
16895 {
16896 memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
16897 return *this;
16898 }
16899
16900 ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ )
16901 {
16902 type = type_;
16903 return *this;
16904 }
16905
16906 ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16907 {
16908 flags = flags_;
16909 return *this;
16910 }
16911
16912 operator const VkObjectTableEntryNVX&() const
16913 {
16914 return *reinterpret_cast<const VkObjectTableEntryNVX*>(this);
16915 }
16916
16917 bool operator==( ObjectTableEntryNVX const& rhs ) const
16918 {
16919 return ( type == rhs.type )
16920 && ( flags == rhs.flags );
16921 }
16922
16923 bool operator!=( ObjectTableEntryNVX const& rhs ) const
16924 {
16925 return !operator==( rhs );
16926 }
16927
16928 ObjectEntryTypeNVX type;
16929 ObjectEntryUsageFlagsNVX flags;
16930 };
16931 static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
16932
16933 struct ObjectTablePipelineEntryNVX
16934 {
16935 ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Pipeline pipeline_ = Pipeline() )
16936 : type( type_ )
16937 , flags( flags_ )
16938 , pipeline( pipeline_ )
16939 {
16940 }
16941
16942 ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
16943 {
16944 memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
16945 }
16946
16947 ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
16948 {
16949 memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
16950 return *this;
16951 }
16952
16953 ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ )
16954 {
16955 type = type_;
16956 return *this;
16957 }
16958
16959 ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16960 {
16961 flags = flags_;
16962 return *this;
16963 }
16964
16965 ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ )
16966 {
16967 pipeline = pipeline_;
16968 return *this;
16969 }
16970
16971 operator const VkObjectTablePipelineEntryNVX&() const
16972 {
16973 return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>(this);
16974 }
16975
16976 bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
16977 {
16978 return ( type == rhs.type )
16979 && ( flags == rhs.flags )
16980 && ( pipeline == rhs.pipeline );
16981 }
16982
16983 bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
16984 {
16985 return !operator==( rhs );
16986 }
16987
16988 ObjectEntryTypeNVX type;
16989 ObjectEntryUsageFlagsNVX flags;
16990 Pipeline pipeline;
16991 };
16992 static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
16993
16994 struct ObjectTableDescriptorSetEntryNVX
16995 {
16996 ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), DescriptorSet descriptorSet_ = DescriptorSet() )
16997 : type( type_ )
16998 , flags( flags_ )
16999 , pipelineLayout( pipelineLayout_ )
17000 , descriptorSet( descriptorSet_ )
17001 {
17002 }
17003
17004 ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs )
17005 {
17006 memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
17007 }
17008
17009 ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs )
17010 {
17011 memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
17012 return *this;
17013 }
17014
17015 ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ )
17016 {
17017 type = type_;
17018 return *this;
17019 }
17020
17021 ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
17022 {
17023 flags = flags_;
17024 return *this;
17025 }
17026
17027 ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
17028 {
17029 pipelineLayout = pipelineLayout_;
17030 return *this;
17031 }
17032
17033 ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ )
17034 {
17035 descriptorSet = descriptorSet_;
17036 return *this;
17037 }
17038
17039 operator const VkObjectTableDescriptorSetEntryNVX&() const
17040 {
17041 return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>(this);
17042 }
17043
17044 bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const
17045 {
17046 return ( type == rhs.type )
17047 && ( flags == rhs.flags )
17048 && ( pipelineLayout == rhs.pipelineLayout )
17049 && ( descriptorSet == rhs.descriptorSet );
17050 }
17051
17052 bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const
17053 {
17054 return !operator==( rhs );
17055 }
17056
17057 ObjectEntryTypeNVX type;
17058 ObjectEntryUsageFlagsNVX flags;
17059 PipelineLayout pipelineLayout;
17060 DescriptorSet descriptorSet;
17061 };
17062 static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
17063
17064 struct ObjectTableVertexBufferEntryNVX
17065 {
17066 ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
17067 : type( type_ )
17068 , flags( flags_ )
17069 , buffer( buffer_ )
17070 {
17071 }
17072
17073 ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
17074 {
17075 memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
17076 }
17077
17078 ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
17079 {
17080 memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
17081 return *this;
17082 }
17083
17084 ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
17085 {
17086 type = type_;
17087 return *this;
17088 }
17089
17090 ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
17091 {
17092 flags = flags_;
17093 return *this;
17094 }
17095
17096 ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ )
17097 {
17098 buffer = buffer_;
17099 return *this;
17100 }
17101
17102 operator const VkObjectTableVertexBufferEntryNVX&() const
17103 {
17104 return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>(this);
17105 }
17106
17107 bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
17108 {
17109 return ( type == rhs.type )
17110 && ( flags == rhs.flags )
17111 && ( buffer == rhs.buffer );
17112 }
17113
17114 bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
17115 {
17116 return !operator==( rhs );
17117 }
17118
17119 ObjectEntryTypeNVX type;
17120 ObjectEntryUsageFlagsNVX flags;
17121 Buffer buffer;
17122 };
17123 static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
17124
17125 struct ObjectTableIndexBufferEntryNVX
17126 {
17127     ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer(), IndexType indexType_ = IndexType::eUint16 )
17128       : type( type_ )
17129       , flags( flags_ )
17130       , buffer( buffer_ )
17131       , indexType( indexType_ )
17132     {
17133 }
17134
17135 ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs )
17136 {
17137 memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
17138 }
17139
17140 ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs )
17141 {
17142 memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
17143 return *this;
17144 }
17145
17146 ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
17147 {
17148 type = type_;
17149 return *this;
17150 }
17151
17152 ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
17153 {
17154 flags = flags_;
17155 return *this;
17156 }
17157
17158 ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ )
17159 {
17160 buffer = buffer_;
17161 return *this;
17162 }
17163
17164     ObjectTableIndexBufferEntryNVX& setIndexType( IndexType indexType_ )
17165 {
17166 indexType = indexType_;
17167 return *this;
17168 }
17169
17170     operator const VkObjectTableIndexBufferEntryNVX&() const
17171 {
17172 return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>(this);
17173 }
17174
17175 bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const
17176 {
17177 return ( type == rhs.type )
17178 && ( flags == rhs.flags )
17179           && ( buffer == rhs.buffer )
17180 && ( indexType == rhs.indexType );
17181     }
17182
17183 bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const
17184 {
17185 return !operator==( rhs );
17186 }
17187
17188 ObjectEntryTypeNVX type;
17189 ObjectEntryUsageFlagsNVX flags;
17190 Buffer buffer;
17191     IndexType indexType;
17192   };
17193 static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
17194
17195 struct ObjectTablePushConstantEntryNVX
17196 {
17197 ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), ShaderStageFlags stageFlags_ = ShaderStageFlags() )
17198 : type( type_ )
17199 , flags( flags_ )
17200 , pipelineLayout( pipelineLayout_ )
17201 , stageFlags( stageFlags_ )
17202 {
17203 }
17204
17205 ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
17206 {
17207 memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
17208 }
17209
17210 ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
17211 {
17212 memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
17213 return *this;
17214 }
17215
17216 ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ )
17217 {
17218 type = type_;
17219 return *this;
17220 }
17221
17222 ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
17223 {
17224 flags = flags_;
17225 return *this;
17226 }
17227
17228 ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
17229 {
17230 pipelineLayout = pipelineLayout_;
17231 return *this;
17232 }
17233
17234 ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ )
17235 {
17236 stageFlags = stageFlags_;
17237 return *this;
17238 }
17239
17240 operator const VkObjectTablePushConstantEntryNVX&() const
17241 {
17242 return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>(this);
17243 }
17244
17245 bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
17246 {
17247 return ( type == rhs.type )
17248 && ( flags == rhs.flags )
17249 && ( pipelineLayout == rhs.pipelineLayout )
17250 && ( stageFlags == rhs.stageFlags );
17251 }
17252
17253 bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
17254 {
17255 return !operator==( rhs );
17256 }
17257
17258 ObjectEntryTypeNVX type;
17259 ObjectEntryUsageFlagsNVX flags;
17260 PipelineLayout pipelineLayout;
17261 ShaderStageFlags stageFlags;
17262 };
17263 static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
17264
17265   enum class SurfaceCounterFlagBitsEXT
17266 {
17267 eVblankExt = VK_SURFACE_COUNTER_VBLANK_EXT
17268 };
17269
17270 using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
17271
17272 VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 )
17273 {
17274 return SurfaceCounterFlagsEXT( bit0 ) | bit1;
17275 }
17276
17277 VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
17278 {
17279 return ~( SurfaceCounterFlagsEXT( bits ) );
17280 }
17281
17282 template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
17283 {
17284 enum
17285 {
17286 allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblankExt)
17287 };
17288 };
17289
17290 struct SurfaceCapabilities2EXT
17291 {
17292 operator const VkSurfaceCapabilities2EXT&() const
17293 {
17294 return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>(this);
17295 }
17296
17297 bool operator==( SurfaceCapabilities2EXT const& rhs ) const
17298 {
17299 return ( sType == rhs.sType )
17300 && ( pNext == rhs.pNext )
17301 && ( minImageCount == rhs.minImageCount )
17302 && ( maxImageCount == rhs.maxImageCount )
17303 && ( currentExtent == rhs.currentExtent )
17304 && ( minImageExtent == rhs.minImageExtent )
17305 && ( maxImageExtent == rhs.maxImageExtent )
17306 && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
17307 && ( supportedTransforms == rhs.supportedTransforms )
17308 && ( currentTransform == rhs.currentTransform )
17309 && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
17310 && ( supportedUsageFlags == rhs.supportedUsageFlags )
17311 && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
17312 }
17313
17314 bool operator!=( SurfaceCapabilities2EXT const& rhs ) const
17315 {
17316 return !operator==( rhs );
17317 }
17318
17319 private:
17320 StructureType sType;
17321
17322 public:
17323 void* pNext;
17324 uint32_t minImageCount;
17325 uint32_t maxImageCount;
17326 Extent2D currentExtent;
17327 Extent2D minImageExtent;
17328 Extent2D maxImageExtent;
17329 uint32_t maxImageArrayLayers;
17330 SurfaceTransformFlagsKHR supportedTransforms;
17331 SurfaceTransformFlagBitsKHR currentTransform;
17332 CompositeAlphaFlagsKHR supportedCompositeAlpha;
17333 ImageUsageFlags supportedUsageFlags;
17334 SurfaceCounterFlagsEXT supportedSurfaceCounters;
17335 };
17336 static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
17337
17338 struct SwapchainCounterCreateInfoEXT
17339 {
17340 SwapchainCounterCreateInfoEXT( SurfaceCounterFlagsEXT surfaceCounters_ = SurfaceCounterFlagsEXT() )
17341 : sType( StructureType::eSwapchainCounterCreateInfoEXT )
17342 , pNext( nullptr )
17343 , surfaceCounters( surfaceCounters_ )
17344 {
17345 }
17346
17347 SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs )
17348 {
17349 memcpy( this, &rhs, sizeof(SwapchainCounterCreateInfoEXT) );
17350 }
17351
17352 SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs )
17353 {
17354 memcpy( this, &rhs, sizeof(SwapchainCounterCreateInfoEXT) );
17355 return *this;
17356 }
17357
17358 SwapchainCounterCreateInfoEXT& setSType( StructureType sType_ )
17359 {
17360 sType = sType_;
17361 return *this;
17362 }
17363
17364 SwapchainCounterCreateInfoEXT& setPNext( const void* pNext_ )
17365 {
17366 pNext = pNext_;
17367 return *this;
17368 }
17369
17370 SwapchainCounterCreateInfoEXT& setSurfaceCounters( SurfaceCounterFlagsEXT surfaceCounters_ )
17371 {
17372 surfaceCounters = surfaceCounters_;
17373 return *this;
17374 }
17375
17376 operator const VkSwapchainCounterCreateInfoEXT&() const
17377 {
17378 return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>(this);
17379 }
17380
17381 bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const
17382 {
17383 return ( sType == rhs.sType )
17384 && ( pNext == rhs.pNext )
17385 && ( surfaceCounters == rhs.surfaceCounters );
17386 }
17387
17388 bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const
17389 {
17390 return !operator==( rhs );
17391 }
17392
17393 private:
17394 StructureType sType;
17395
17396 public:
17397 const void* pNext;
17398 SurfaceCounterFlagsEXT surfaceCounters;
17399 };
17400 static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
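  // Usage sketch (illustrative): requesting the vblank counter for a swapchain by chaining
  // the struct into SwapchainCreateInfoKHR, assuming VK_EXT_display_control is enabled.
  //
  //   vk::SwapchainCounterCreateInfoEXT counterInfo( vk::SurfaceCounterFlagBitsEXT::eVblankExt );
  //   vk::SwapchainCreateInfoKHR swapchainInfo;   // surface, format, extent, ... filled as usual
  //   swapchainInfo.setPNext( &counterInfo );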
17401
17402 enum class DisplayPowerStateEXT
17403 {
17404 eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
17405 eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
17406 eOn = VK_DISPLAY_POWER_STATE_ON_EXT
17407 };
17408
17409 struct DisplayPowerInfoEXT
17410 {
17411 DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff )
17412 : sType( StructureType::eDisplayPowerInfoEXT )
17413 , pNext( nullptr )
17414 , powerState( powerState_ )
17415 {
17416 }
17417
17418 DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs )
17419 {
17420 memcpy( this, &rhs, sizeof(DisplayPowerInfoEXT) );
17421 }
17422
17423 DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs )
17424 {
17425 memcpy( this, &rhs, sizeof(DisplayPowerInfoEXT) );
17426 return *this;
17427 }
17428
17429 DisplayPowerInfoEXT& setSType( StructureType sType_ )
17430 {
17431 sType = sType_;
17432 return *this;
17433 }
17434
17435 DisplayPowerInfoEXT& setPNext( const void* pNext_ )
17436 {
17437 pNext = pNext_;
17438 return *this;
17439 }
17440
17441 DisplayPowerInfoEXT& setPowerState( DisplayPowerStateEXT powerState_ )
17442 {
17443 powerState = powerState_;
17444 return *this;
17445 }
17446
17447 operator const VkDisplayPowerInfoEXT&() const
17448 {
17449 return *reinterpret_cast<const VkDisplayPowerInfoEXT*>(this);
17450 }
17451
17452 bool operator==( DisplayPowerInfoEXT const& rhs ) const
17453 {
17454 return ( sType == rhs.sType )
17455 && ( pNext == rhs.pNext )
17456 && ( powerState == rhs.powerState );
17457 }
17458
17459 bool operator!=( DisplayPowerInfoEXT const& rhs ) const
17460 {
17461 return !operator==( rhs );
17462 }
17463
17464 private:
17465 StructureType sType;
17466
17467 public:
17468 const void* pNext;
17469 DisplayPowerStateEXT powerState;
17470 };
17471 static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
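  // Usage sketch (illustrative; "display" is a hypothetical DisplayKHR handle): putting a
  // display to sleep through VK_EXT_display_control.
  //
  //   vk::DisplayPowerInfoEXT powerInfo( vk::DisplayPowerStateEXT::eOff );
  //   // device.displayPowerControlEXT( display, powerInfo );  // assumes the extension wrapper is present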
17472
17473 enum class DeviceEventTypeEXT
17474 {
17475 eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
17476 };
17477
17478 struct DeviceEventInfoEXT
17479 {
17480 DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug )
17481 : sType( StructureType::eDeviceEventInfoEXT )
17482 , pNext( nullptr )
17483 , deviceEvent( deviceEvent_ )
17484 {
17485 }
17486
17487 DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs )
17488 {
17489 memcpy( this, &rhs, sizeof(DeviceEventInfoEXT) );
17490 }
17491
17492 DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs )
17493 {
17494 memcpy( this, &rhs, sizeof(DeviceEventInfoEXT) );
17495 return *this;
17496 }
17497
17498 DeviceEventInfoEXT& setSType( StructureType sType_ )
17499 {
17500 sType = sType_;
17501 return *this;
17502 }
17503
17504 DeviceEventInfoEXT& setPNext( const void* pNext_ )
17505 {
17506 pNext = pNext_;
17507 return *this;
17508 }
17509
17510 DeviceEventInfoEXT& setDeviceEvent( DeviceEventTypeEXT deviceEvent_ )
17511 {
17512 deviceEvent = deviceEvent_;
17513 return *this;
17514 }
17515
17516 operator const VkDeviceEventInfoEXT&() const
17517 {
17518 return *reinterpret_cast<const VkDeviceEventInfoEXT*>(this);
17519 }
17520
17521 bool operator==( DeviceEventInfoEXT const& rhs ) const
17522 {
17523 return ( sType == rhs.sType )
17524 && ( pNext == rhs.pNext )
17525 && ( deviceEvent == rhs.deviceEvent );
17526 }
17527
17528 bool operator!=( DeviceEventInfoEXT const& rhs ) const
17529 {
17530 return !operator==( rhs );
17531 }
17532
17533 private:
17534 StructureType sType;
17535
17536 public:
17537 const void* pNext;
17538 DeviceEventTypeEXT deviceEvent;
17539 };
17540 static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
17541
17542 enum class DisplayEventTypeEXT
17543 {
17544 eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
17545 };
17546
17547 struct DisplayEventInfoEXT
17548 {
17549 DisplayEventInfoEXT( DisplayEventTypeEXT displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut )
17550 : sType( StructureType::eDisplayEventInfoEXT )
17551 , pNext( nullptr )
17552 , displayEvent( displayEvent_ )
17553 {
17554 }
17555
17556 DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs )
17557 {
17558 memcpy( this, &rhs, sizeof(DisplayEventInfoEXT) );
17559 }
17560
17561 DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs )
17562 {
17563 memcpy( this, &rhs, sizeof(DisplayEventInfoEXT) );
17564 return *this;
17565 }
17566
17567 DisplayEventInfoEXT& setSType( StructureType sType_ )
17568 {
17569 sType = sType_;
17570 return *this;
17571 }
17572
17573 DisplayEventInfoEXT& setPNext( const void* pNext_ )
17574 {
17575 pNext = pNext_;
17576 return *this;
17577 }
17578
17579 DisplayEventInfoEXT& setDisplayEvent( DisplayEventTypeEXT displayEvent_ )
17580 {
17581 displayEvent = displayEvent_;
17582 return *this;
17583 }
17584
17585 operator const VkDisplayEventInfoEXT&() const
17586 {
17587 return *reinterpret_cast<const VkDisplayEventInfoEXT*>(this);
17588 }
17589
17590 bool operator==( DisplayEventInfoEXT const& rhs ) const
17591 {
17592 return ( sType == rhs.sType )
17593 && ( pNext == rhs.pNext )
17594 && ( displayEvent == rhs.displayEvent );
17595 }
17596
17597 bool operator!=( DisplayEventInfoEXT const& rhs ) const
17598 {
17599 return !operator==( rhs );
17600 }
17601
17602 private:
17603 StructureType sType;
17604
17605 public:
17606 const void* pNext;
17607 DisplayEventTypeEXT displayEvent;
17608 };
17609 static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
17610
17611   VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties )
17612 {
17613 return static_cast<Result>( vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
17614 }
17615
17616#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17617 template <typename Allocator = std::allocator<LayerProperties>>
17618 typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties()
17619 {
17620 std::vector<LayerProperties,Allocator> properties;
17621 uint32_t propertyCount;
17622 Result result;
17623 do
17624 {
17625 result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
17626 if ( ( result == Result::eSuccess ) && propertyCount )
17627 {
17628 properties.resize( propertyCount );
17629 result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
17630 }
17631 } while ( result == Result::eIncomplete );
17632 assert( propertyCount <= properties.size() );
17633 properties.resize( propertyCount );
17634 return createResultValue( result, properties, "vk::enumerateInstanceLayerProperties" );
17635 }
17636#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
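  // A minimal usage sketch (illustrative only, excluded from compilation): with exceptions enabled, the
  // enhanced overload above returns the std::vector directly. The helper function name and the
  // "VK_LAYER_LUNARG_standard_validation" layer are assumptions for the example, not part of this header.
#if 0
  inline bool exampleStandardValidationLayerAvailable()
  {
    for ( vk::LayerProperties const& layer : vk::enumerateInstanceLayerProperties() )
    {
      // layerName is a fixed-size char array, so strcmp (from <cstring>, included above) applies directly
      if ( strcmp( layer.layerName, "VK_LAYER_LUNARG_standard_validation" ) == 0 )
      {
        return true;
      }
    }
    return false;
  }
#endif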
17637
17638 VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties )
17639 {
17640 return static_cast<Result>( vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
17641 }
17642
17643#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17644 template <typename Allocator = std::allocator<ExtensionProperties>>
17645 typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr )
17646 {
17647 std::vector<ExtensionProperties,Allocator> properties;
17648 uint32_t propertyCount;
17649 Result result;
17650 do
17651 {
17652 result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
17653 if ( ( result == Result::eSuccess ) && propertyCount )
17654 {
17655 properties.resize( propertyCount );
17656 result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
17657 }
17658 } while ( result == Result::eIncomplete );
17659 assert( propertyCount <= properties.size() );
17660 properties.resize( propertyCount );
17661 return createResultValue( result, properties, "vk::enumerateInstanceExtensionProperties" );
17662 }
17663#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
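  // A usage sketch (illustrative only, excluded from compilation): passing a layer name through the
  // Optional<const std::string> parameter restricts the query to that layer, while the default nullptr
  // queries the implementation and implicitly enabled layers. The helper function and the layer name
  // below are assumptions for the example.
#if 0
  inline void exampleEnumerateInstanceExtensions()
  {
    std::vector<vk::ExtensionProperties> instanceExtensions = vk::enumerateInstanceExtensionProperties();
    std::vector<vk::ExtensionProperties> layerExtensions =
      vk::enumerateInstanceExtensionProperties( std::string( "VK_LAYER_LUNARG_standard_validation" ) );
  }
#endif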
17664
17665 // forward declarations
17666 struct CmdProcessCommandsInfoNVX;
17667
17668  class CommandBuffer
17669 {
17670 public:
17671 CommandBuffer()
17672 : m_commandBuffer(VK_NULL_HANDLE)
17673 {}
17674
17675#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
17676 CommandBuffer(VkCommandBuffer commandBuffer)
17677 : m_commandBuffer(commandBuffer)
17678 {}
17679
17680 CommandBuffer& operator=(VkCommandBuffer commandBuffer)
17681 {
17682 m_commandBuffer = commandBuffer;
17683 return *this;
17684 }
17685#endif
17686
17687    bool operator==(CommandBuffer const &rhs) const
17688 {
17689 return m_commandBuffer == rhs.m_commandBuffer;
17690 }
17691
17692 bool operator!=(CommandBuffer const &rhs) const
17693 {
17694 return m_commandBuffer != rhs.m_commandBuffer;
17695 }
17696
17697 bool operator<(CommandBuffer const &rhs) const
17698 {
17699 return m_commandBuffer < rhs.m_commandBuffer;
17700 }
17701
17702    Result begin( const CommandBufferBeginInfo* pBeginInfo ) const
17703 {
17704 return static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
17705 }
17706
17707#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17708 ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo ) const
17709 {
17710 Result result = static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
17711 return createResultValue( result, "vk::CommandBuffer::begin" );
17712 }
17713#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17714
17715#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17716 Result end( ) const
17717 {
17718 return static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
17719 }
17720#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17721
17722#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17723 ResultValueType<void>::type end() const
17724 {
17725 Result result = static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
17726 return createResultValue( result, "vk::CommandBuffer::end" );
17727 }
17728#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17729
17730#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17731 Result reset( CommandBufferResetFlags flags ) const
17732 {
17733 return static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
17734 }
17735#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17736
17737#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17738 ResultValueType<void>::type reset( CommandBufferResetFlags flags ) const
17739 {
17740 Result result = static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
17741 return createResultValue( result, "vk::CommandBuffer::reset" );
17742 }
17743#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17744
17745#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17746 void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
17747 {
17748 vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
17749 }
17750#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17751
17752#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17753 void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
17754 {
17755 vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
17756 }
17757#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17758
17759 void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const
17760 {
17761 vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
17762 }
17763
17764#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17765 void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const
17766 {
17767 vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
17768 }
17769#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17770
17771 void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const
17772 {
17773 vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
17774 }
17775
17776#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17777 void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const
17778 {
17779 vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
17780 }
17781#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17782
17783#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17784 void setLineWidth( float lineWidth ) const
17785 {
17786 vkCmdSetLineWidth( m_commandBuffer, lineWidth );
17787 }
17788#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17789
17790#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17791 void setLineWidth( float lineWidth ) const
17792 {
17793 vkCmdSetLineWidth( m_commandBuffer, lineWidth );
17794 }
17795#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17796
17797#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17798 void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
17799 {
17800 vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
17801 }
17802#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17803
17804#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17805 void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
17806 {
17807 vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
17808 }
17809#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17810
17811#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17812 void setBlendConstants( const float blendConstants[4] ) const
17813 {
17814 vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
17815 }
17816#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17817
17818#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17819 void setBlendConstants( const float blendConstants[4] ) const
17820 {
17821 vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
17822 }
17823#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17824
17825#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17826 void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
17827 {
17828 vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
17829 }
17830#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17831
17832#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17833 void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
17834 {
17835 vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
17836 }
17837#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17838
17839#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17840 void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
17841 {
17842 vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
17843 }
17844#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17845
17846#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17847 void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
17848 {
17849 vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
17850 }
17851#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17852
17853#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17854 void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
17855 {
17856 vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
17857 }
17858#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17859
17860#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17861 void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
17862 {
17863 vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
17864 }
17865#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17866
17867#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17868 void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
17869 {
17870 vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
17871 }
17872#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17873
17874#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17875 void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
17876 {
17877 vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
17878 }
17879#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17880
17881 void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
17882 {
17883 vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
17884 }
17885
17886#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17887 void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const
17888 {
17889 vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
17890 }
17891#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17892
17893#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17894 void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
17895 {
17896 vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
17897 }
17898#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17899
17900#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17901 void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
17902 {
17903 vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
17904 }
17905#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17906
17907 void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const
17908 {
17909 vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
17910 }
17911
17912#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17913 void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const
17914 {
17915#ifdef VULKAN_HPP_NO_EXCEPTIONS
17916 assert( buffers.size() == offsets.size() );
17917#else
17918 if ( buffers.size() != offsets.size() )
17919 {
17920 throw std::logic_error( "vk::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
17921 }
17922#endif // VULKAN_HPP_NO_EXCEPTIONS
17923 vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
17924 }
17925#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
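    // Usage sketch (illustrative only): the ArrayProxy overload above requires one offset per buffer and
    // throws std::logic_error (or asserts under VULKAN_HPP_NO_EXCEPTIONS) on a size mismatch. Assuming a
    // valid command buffer `cmd` and vertex buffer `vertexBuffer`, a single binding could look like:
    //
    //   vk::DeviceSize offset = 0;
    //   cmd.bindVertexBuffers( 0, vertexBuffer, offset );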
17926
17927#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17928 void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
17929 {
17930 vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
17931 }
17932#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17933
17934#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17935 void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
17936 {
17937 vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
17938 }
17939#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17940
17941#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17942 void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
17943 {
17944 vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
17945 }
17946#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17947
17948#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17949 void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
17950 {
17951 vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
17952 }
17953#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17954
17955#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17956 void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17957 {
17958 vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17959 }
17960#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17961
17962#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17963 void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17964 {
17965 vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17966 }
17967#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17968
17969#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17970 void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17971 {
17972 vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17973 }
17974#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17975
17976#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17977 void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17978 {
17979 vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17980 }
17981#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17982
17983#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17984 void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
17985 {
17986 vkCmdDispatch( m_commandBuffer, x, y, z );
17987 }
17988#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17989
17990#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17991 void dispatch( uint32_t x, uint32_t y, uint32_t z ) const
17992 {
17993 vkCmdDispatch( m_commandBuffer, x, y, z );
17994 }
17995#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17996
17997#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17998 void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
17999 {
18000 vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
18001 }
18002#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18003
18004#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18005 void dispatchIndirect( Buffer buffer, DeviceSize offset ) const
18006 {
18007 vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
18008 }
18009#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18010
18011 void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const
18012 {
18013 vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
18014 }
18015
18016#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18017 void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const
18018 {
18019 vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
18020 }
18021#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18022
18023 void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const
18024 {
18025 vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
18026 }
18027
18028#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18029 void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const
18030 {
18031 vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
18032 }
18033#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18034
18035 void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const
18036 {
18037 vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
18038 }
18039
18040#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18041 void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const
18042 {
18043 vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
18044 }
18045#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18046
18047 void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const
18048 {
18049 vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
18050 }
18051
18052#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18053 void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const
18054 {
18055 vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
18056 }
18057#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18058
18059 void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const
18060 {
18061 vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
18062 }
18063
18064#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18065 void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const
18066 {
18067 vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
18068 }
18069#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18070
18071 void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const
18072 {
18073 vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
18074 }
18075
18076#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18077 template <typename T>
18078 void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const
18079 {
18080 vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
18081 }
18082#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18083
18084#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18085 void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
18086 {
18087 vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
18088 }
18089#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18090
18091#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18092 void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
18093 {
18094 vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
18095 }
18096#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18097
18098 void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
18099 {
18100 vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
18101 }
18102
18103#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18104 void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const
18105 {
18106 vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
18107 }
18108#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18109
18110 void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
18111 {
18112 vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
18113 }
18114
18115#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18116 void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const
18117 {
18118 vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
18119 }
18120#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18121
18122 void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const
18123 {
18124 vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
18125 }
18126
18127#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18128 void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const
18129 {
18130 vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
18131 }
18132#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18133
18134 void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const
18135 {
18136 vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
18137 }
18138
18139#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18140 void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const
18141 {
18142 vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
18143 }
18144#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18145
18146#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18147 void setEvent( Event event, PipelineStageFlags stageMask ) const
18148 {
18149 vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
18150 }
18151#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18152
18153#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18154 void setEvent( Event event, PipelineStageFlags stageMask ) const
18155 {
18156 vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
18157 }
18158#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18159
18160#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18161 void resetEvent( Event event, PipelineStageFlags stageMask ) const
18162 {
18163 vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
18164 }
18165#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18166
18167#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18168 void resetEvent( Event event, PipelineStageFlags stageMask ) const
18169 {
18170 vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
18171 }
18172#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18173
18174 void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
18175 {
18176 vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
18177 }
18178
18179#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18180 void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
18181 {
18182 vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
18183 }
18184#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18185
18186 void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
18187 {
18188 vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
18189 }
18190
18191#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18192 void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
18193 {
18194 vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
18195 }
18196#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
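    // Usage sketch (illustrative only): a single image layout transition through the ArrayProxy overload
    // above, assuming a valid command buffer `cmd` and an ImageMemoryBarrier `barrier` filled elsewhere;
    // nullptr stands in for the barrier arrays that are not needed:
    //
    //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
    //                        vk::DependencyFlags(), nullptr, nullptr, barrier );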
18197
18198#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18199 void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
18200 {
18201 vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
18202 }
18203#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18204
18205#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18206 void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
18207 {
18208 vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
18209 }
18210#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18211
18212#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18213 void endQuery( QueryPool queryPool, uint32_t query ) const
18214 {
18215 vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
18216 }
18217#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18218
18219#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18220 void endQuery( QueryPool queryPool, uint32_t query ) const
18221 {
18222 vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
18223 }
18224#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18225
18226#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18227 void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
18228 {
18229 vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
18230 }
18231#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18232
18233#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18234 void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
18235 {
18236 vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
18237 }
18238#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18239
18240#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18241 void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
18242 {
18243 vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
18244 }
18245#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18246
18247#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18248 void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
18249 {
18250 vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
18251 }
18252#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18253
18254#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18255 void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
18256 {
18257 vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
18258 }
18259#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18260
18261#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18262 void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
18263 {
18264 vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
18265 }
18266#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18267
18268 void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
18269 {
18270 vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
18271 }
18272
18273#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18274 template <typename T>
18275 void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const
18276 {
18277 vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
18278 }
18279#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
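    // Usage sketch (illustrative only): the templated overload above derives the byte size from the
    // element type, which has to be named explicitly at the call site. Assuming a valid command buffer
    // `cmd` and pipeline layout `layout`:
    //
    //   std::array<float, 4> tint = {{ 1.0f, 0.5f, 0.25f, 1.0f }};
    //   cmd.pushConstants<float>( layout, vk::ShaderStageFlagBits::eFragment, 0, tint );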
18280
18281 void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const
18282 {
18283 vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
18284 }
18285
18286#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18287 void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const
18288 {
18289 vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
18290 }
18291#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18292
18293#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18294 void nextSubpass( SubpassContents contents ) const
18295 {
18296 vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
18297 }
18298#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18299
18300#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18301 void nextSubpass( SubpassContents contents ) const
18302 {
18303 vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
18304 }
18305#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18306
18307#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18308 void endRenderPass( ) const
18309 {
18310 vkCmdEndRenderPass( m_commandBuffer );
18311 }
18312#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18313
18314#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18315 void endRenderPass() const
18316 {
18317 vkCmdEndRenderPass( m_commandBuffer );
18318 }
18319#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18320
18321 void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
18322 {
18323 vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
18324 }
18325
18326#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18327 void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const
18328 {
18329 vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
18330 }
18331#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18332
18333 void debugMarkerBeginEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
18334 {
18335 vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
18336 }
18337
18338#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18339 DebugMarkerMarkerInfoEXT debugMarkerBeginEXT() const
18340 {
18341 DebugMarkerMarkerInfoEXT markerInfo;
18342 vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
18343 return markerInfo;
18344 }
18345#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18346
18347#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18348 void debugMarkerEndEXT( ) const
18349 {
18350 vkCmdDebugMarkerEndEXT( m_commandBuffer );
18351 }
18352#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18353
18354#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18355 void debugMarkerEndEXT() const
18356 {
18357 vkCmdDebugMarkerEndEXT( m_commandBuffer );
18358 }
18359#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18360
18361 void debugMarkerInsertEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
18362 {
18363 vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
18364 }
18365
18366#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18367 DebugMarkerMarkerInfoEXT debugMarkerInsertEXT() const
18368 {
18369 DebugMarkerMarkerInfoEXT markerInfo;
18370 vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
18371 return markerInfo;
18372 }
18373#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18374
18375#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18376 void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
18377 {
18378 vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
18379 }
18380#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18381
18382#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18383 void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
18384 {
18385 vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
18386 }
18387#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18388
18389#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18390 void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
18391 {
18392 vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
18393 }
18394#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18395
18396#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18397 void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
18398 {
18399 vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
18400 }
18401#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18402
18403    void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const
18404 {
18405 vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
18406 }
18407
18408#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18409 void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo ) const
18410 {
18411 vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
18412 }
18413#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18414
18415 void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const
18416 {
18417 vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
18418 }
18419
18420#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18421 void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo ) const
18422 {
18423 vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
18424 }
18425#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18426
18427#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
18428 explicit
18429#endif
18430 operator VkCommandBuffer() const
18431 {
18432 return m_commandBuffer;
18433 }
18434
18435 explicit operator bool() const
18436 {
18437 return m_commandBuffer != VK_NULL_HANDLE;
18438 }
18439
18440 bool operator!() const
18441 {
18442 return m_commandBuffer == VK_NULL_HANDLE;
18443 }
18444
18445 private:
18446 VkCommandBuffer m_commandBuffer;
18447 };
18448 static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
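  // A usage sketch (illustrative only, excluded from compilation): recording a trivial render pass with
  // the enhanced overloads, assuming exceptions are enabled. The helper function and its parameters
  // (command buffer, pipeline, pre-filled render-pass begin info) are assumptions for the example.
#if 0
  inline void exampleRecordTriangle( vk::CommandBuffer cmd, vk::Pipeline pipeline,
                                     vk::RenderPassBeginInfo const& renderPassBeginInfo )
  {
    cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
    cmd.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );
    cmd.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
    cmd.draw( 3, 1, 0, 0 );  // three vertices, one instance
    cmd.endRenderPass();
    cmd.end();
  }
#endif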
18449
18450  struct SubmitInfo
18451 {
18452 SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
18453 : sType( StructureType::eSubmitInfo )
18454 , pNext( nullptr )
18455 , waitSemaphoreCount( waitSemaphoreCount_ )
18456 , pWaitSemaphores( pWaitSemaphores_ )
18457 , pWaitDstStageMask( pWaitDstStageMask_ )
18458 , commandBufferCount( commandBufferCount_ )
18459 , pCommandBuffers( pCommandBuffers_ )
18460 , signalSemaphoreCount( signalSemaphoreCount_ )
18461 , pSignalSemaphores( pSignalSemaphores_ )
18462 {
18463 }
18464
18465 SubmitInfo( VkSubmitInfo const & rhs )
18466 {
18467 memcpy( this, &rhs, sizeof(SubmitInfo) );
18468 }
18469
18470 SubmitInfo& operator=( VkSubmitInfo const & rhs )
18471 {
18472 memcpy( this, &rhs, sizeof(SubmitInfo) );
18473 return *this;
18474 }
18475
18476 SubmitInfo& setSType( StructureType sType_ )
18477 {
18478 sType = sType_;
18479 return *this;
18480 }
18481
18482 SubmitInfo& setPNext( const void* pNext_ )
18483 {
18484 pNext = pNext_;
18485 return *this;
18486 }
18487
18488 SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
18489 {
18490 waitSemaphoreCount = waitSemaphoreCount_;
18491 return *this;
18492 }
18493
18494 SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
18495 {
18496 pWaitSemaphores = pWaitSemaphores_;
18497 return *this;
18498 }
18499
18500 SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
18501 {
18502 pWaitDstStageMask = pWaitDstStageMask_;
18503 return *this;
18504 }
18505
18506 SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
18507 {
18508 commandBufferCount = commandBufferCount_;
18509 return *this;
18510 }
18511
18512 SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
18513 {
18514 pCommandBuffers = pCommandBuffers_;
18515 return *this;
18516 }
18517
18518 SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
18519 {
18520 signalSemaphoreCount = signalSemaphoreCount_;
18521 return *this;
18522 }
18523
18524 SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
18525 {
18526 pSignalSemaphores = pSignalSemaphores_;
18527 return *this;
18528 }
18529
18530 operator const VkSubmitInfo&() const
18531 {
18532 return *reinterpret_cast<const VkSubmitInfo*>(this);
18533 }
18534
18535 bool operator==( SubmitInfo const& rhs ) const
18536 {
18537 return ( sType == rhs.sType )
18538 && ( pNext == rhs.pNext )
18539 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
18540 && ( pWaitSemaphores == rhs.pWaitSemaphores )
18541 && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
18542 && ( commandBufferCount == rhs.commandBufferCount )
18543 && ( pCommandBuffers == rhs.pCommandBuffers )
18544 && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
18545 && ( pSignalSemaphores == rhs.pSignalSemaphores );
18546 }
18547
18548 bool operator!=( SubmitInfo const& rhs ) const
18549 {
18550 return !operator==( rhs );
18551 }
18552
18553 private:
18554 StructureType sType;
18555
18556 public:
18557 const void* pNext;
18558 uint32_t waitSemaphoreCount;
18559 const Semaphore* pWaitSemaphores;
18560 const PipelineStageFlags* pWaitDstStageMask;
18561 uint32_t commandBufferCount;
18562 const CommandBuffer* pCommandBuffers;
18563 uint32_t signalSemaphoreCount;
18564 const Semaphore* pSignalSemaphores;
18565 };
18566 static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
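  // A usage sketch (illustrative only, excluded from compilation): every setter returns *this, so a
  // SubmitInfo can be built fluently. The helper function and its command buffer and semaphore
  // parameters are assumptions for the example.
#if 0
  inline vk::SubmitInfo exampleBuildSubmitInfo( vk::CommandBuffer const& cmd, vk::Semaphore const& renderFinished )
  {
    vk::SubmitInfo submitInfo;
    submitInfo.setCommandBufferCount( 1 )
              .setPCommandBuffers( &cmd )
              .setSignalSemaphoreCount( 1 )
              .setPSignalSemaphores( &renderFinished );
    // note: the structure stores raw pointers, so cmd and renderFinished must outlive the submit
    return submitInfo;
  }
#endif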
18567
18568 class Queue
18569 {
18570 public:
18571 Queue()
18572 : m_queue(VK_NULL_HANDLE)
18573 {}
18574
18575#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
18576 Queue(VkQueue queue)
18577 : m_queue(queue)
18578 {}
18579
18580 Queue& operator=(VkQueue queue)
18581 {
18582 m_queue = queue;
18583 return *this;
18584 }
18585#endif
18586
18587    bool operator==(Queue const &rhs) const
18588 {
18589 return m_queue == rhs.m_queue;
18590 }
18591
18592 bool operator!=(Queue const &rhs) const
18593 {
18594 return m_queue != rhs.m_queue;
18595 }
18596
18597 bool operator<(Queue const &rhs) const
18598 {
18599 return m_queue < rhs.m_queue;
18600 }
18601
18602    Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const
18603 {
18604 return static_cast<Result>( vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
18605 }
18606
18607#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18608 ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const
18609 {
18610 Result result = static_cast<Result>( vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
18611 return createResultValue( result, "vk::Queue::submit" );
18612 }
18613#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18614
18615#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18616 Result waitIdle( ) const
18617 {
18618 return static_cast<Result>( vkQueueWaitIdle( m_queue ) );
18619 }
18620#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18621
18622#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18623 ResultValueType<void>::type waitIdle() const
18624 {
18625 Result result = static_cast<Result>( vkQueueWaitIdle( m_queue ) );
18626 return createResultValue( result, "vk::Queue::waitIdle" );
18627 }
18628#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18629
18630 Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const
18631 {
18632 return static_cast<Result>( vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
18633 }
18634
18635#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18636 ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const
18637 {
18638 Result result = static_cast<Result>( vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
18639 return createResultValue( result, "vk::Queue::bindSparse" );
18640 }
18641#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18642
18643 Result presentKHR( const PresentInfoKHR* pPresentInfo ) const
18644 {
18645 return static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
18646 }
18647
18648#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18649 Result presentKHR( const PresentInfoKHR & presentInfo ) const
18650 {
18651 Result result = static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
18652 return createResultValue( result, "vk::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
18653 }
18654#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18655
18656#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
18657 explicit
18658#endif
18659 operator VkQueue() const
18660 {
18661 return m_queue;
18662 }
18663
18664 explicit operator bool() const
18665 {
18666 return m_queue != VK_NULL_HANDLE;
18667 }
18668
18669 bool operator!() const
18670 {
18671 return m_queue == VK_NULL_HANDLE;
18672 }
18673
18674 private:
18675 VkQueue m_queue;
18676 };
18677 static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
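  // A usage sketch (illustrative only, excluded from compilation): submitting work and waiting for it
  // with the enhanced overloads, assuming exceptions are enabled; a default-constructed Fence means no
  // fence is signaled. The helper function, queue and submit info are assumptions for the example.
#if 0
  inline void exampleSubmitAndWait( vk::Queue queue, vk::SubmitInfo const& submitInfo )
  {
    queue.submit( submitInfo, vk::Fence() );  // single-element ArrayProxy from a plain reference
    queue.waitIdle();
  }
#endif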
18678
18679  class Device
18680 {
18681 public:
18682 Device()
18683 : m_device(VK_NULL_HANDLE)
18684 {}
18685
18686#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
18687 Device(VkDevice device)
18688 : m_device(device)
18689 {}
18690
18691 Device& operator=(VkDevice device)
18692 {
18693 m_device = device;
18694 return *this;
18695 }
18696#endif
18697
18698    bool operator==(Device const &rhs) const
18699 {
18700 return m_device == rhs.m_device;
18701 }
18702
18703 bool operator!=(Device const &rhs) const
18704 {
18705 return m_device != rhs.m_device;
18706 }
18707
18708 bool operator<(Device const &rhs) const
18709 {
18710 return m_device < rhs.m_device;
18711 }
18712
18713    PFN_vkVoidFunction getProcAddr( const char* pName ) const
18714 {
18715 return vkGetDeviceProcAddr( m_device, pName );
18716 }
18717
18718#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18719 PFN_vkVoidFunction getProcAddr( const std::string & name ) const
18720 {
18721 return vkGetDeviceProcAddr( m_device, name.c_str() );
18722 }
18723#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18724
18725 void destroy( const AllocationCallbacks* pAllocator ) const
18726 {
18727 vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18728 }
18729
18730#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18731 void destroy( Optional<const AllocationCallbacks> allocator = nullptr ) const
18732 {
18733 vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18734 }
18735#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18736
18737 void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue ) const
18738 {
18739 vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
18740 }
18741
18742#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18743 Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const
18744 {
18745 Queue queue;
18746 vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
18747 return queue;
18748 }
18749#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18750
18751#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18752 Result waitIdle( ) const
18753 {
18754 return static_cast<Result>( vkDeviceWaitIdle( m_device ) );
18755 }
18756#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18757
18758#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18759 ResultValueType<void>::type waitIdle() const
18760 {
18761 Result result = static_cast<Result>( vkDeviceWaitIdle( m_device ) );
18762 return createResultValue( result, "vk::Device::waitIdle" );
18763 }
18764#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18765
18766 Result allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory ) const
18767 {
18768 return static_cast<Result>( vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
18769 }
18770
18771#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18772 ResultValueType<DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18773 {
18774 DeviceMemory memory;
18775 Result result = static_cast<Result>( vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
18776 return createResultValue( result, memory, "vk::Device::allocateMemory" );
18777 }
18778#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18779
18780 void freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator ) const
18781 {
18782 vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18783 }
18784
18785#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18786 void freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr ) const
18787 {
18788 vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18789 }
18790#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18791
18792#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18793 Result mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData ) const
18794 {
18795 return static_cast<Result>( vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), ppData ) );
18796 }
18797#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18798
18799#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18800 ResultValueType<void*>::type mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags = MemoryMapFlags() ) const
18801 {
18802 void* pData;
18803 Result result = static_cast<Result>( vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), &pData ) );
18804 return createResultValue( result, pData, "vk::Device::mapMemory" );
18805 }
18806#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18807
18808#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18809 void unmapMemory( DeviceMemory memory ) const
18810 {
18811 vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
18812 }
18813#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18814
18815#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18816 void unmapMemory( DeviceMemory memory ) const
18817 {
18818 vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
18819 }
18820#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18821
18822 Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const
18823 {
18824 return static_cast<Result>( vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
18825 }
18826
18827#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18828 ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const
18829 {
18830 Result result = static_cast<Result>( vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
18831 return createResultValue( result, "vk::Device::flushMappedMemoryRanges" );
18832 }
18833#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
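    // Usage sketch for allocateMemory / mapMemory / flushMappedMemoryRanges / unmapMemory.
    // Assumes a valid vk::Device `device`, a host-visible `memoryTypeIndex`, a source pointer
    // `src` of `size` bytes, and the enhanced-mode default of throwing on failure.
    //
    //   vk::DeviceMemory memory = device.allocateMemory( vk::MemoryAllocateInfo( size, memoryTypeIndex ) );
    //   void* data = device.mapMemory( memory, 0, size );
    //   memcpy( data, src, size );
    //   vk::MappedMemoryRange range( memory, 0, size );    // flush only required for non-coherent memory
    //   device.flushMappedMemoryRanges( range );
    //   device.unmapMemory( memory );
    //   device.freeMemory( memory );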
18834
18835 Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const
18836 {
18837 return static_cast<Result>( vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
18838 }
18839
18840#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18841 ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const
18842 {
18843 Result result = static_cast<Result>( vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
18844 return createResultValue( result, "vk::Device::invalidateMappedMemoryRanges" );
18845 }
18846#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18847
18848 void getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes ) const
18849 {
18850 vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), pCommittedMemoryInBytes );
18851 }
18852
18853#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18854 DeviceSize getMemoryCommitment( DeviceMemory memory ) const
18855 {
18856 DeviceSize committedMemoryInBytes;
18857 vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), &committedMemoryInBytes );
18858 return committedMemoryInBytes;
18859 }
18860#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18861
18862 void getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements ) const
18863 {
18864 vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
18865 }
18866
18867#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18868 MemoryRequirements getBufferMemoryRequirements( Buffer buffer ) const
18869 {
18870 MemoryRequirements memoryRequirements;
18871 vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
18872 return memoryRequirements;
18873 }
18874#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18875
18876#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18877 Result bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const
18878 {
18879 return static_cast<Result>( vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
18880 }
18881#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18882
18883#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18884 ResultValueType<void>::type bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const
18885 {
18886 Result result = static_cast<Result>( vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
18887 return createResultValue( result, "vk::Device::bindBufferMemory" );
18888 }
18889#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
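    // Usage sketch for createBuffer / getBufferMemoryRequirements / bindBufferMemory.
    // Assumes a valid vk::Device `device` and a hypothetical helper `findMemoryType` that picks a
    // suitable memory type index from the returned requirements.
    //
    //   vk::Buffer buffer = device.createBuffer(
    //     vk::BufferCreateInfo( vk::BufferCreateFlags(), size, vk::BufferUsageFlagBits::eUniformBuffer ) );
    //   vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );
    //   vk::DeviceMemory memory = device.allocateMemory(
    //     vk::MemoryAllocateInfo( requirements.size, findMemoryType( requirements ) ) );
    //   device.bindBufferMemory( buffer, memory, 0 );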
18890
18891 void getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements ) const
18892 {
18893 vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
18894 }
18895
18896#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18897 MemoryRequirements getImageMemoryRequirements( Image image ) const
18898 {
18899 MemoryRequirements memoryRequirements;
18900 vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
18901 return memoryRequirements;
18902 }
18903#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18904
18905#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18906 Result bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const
18907 {
18908 return static_cast<Result>( vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
18909 }
18910#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18911
18912#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18913 ResultValueType<void>::type bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const
18914 {
18915 Result result = static_cast<Result>( vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
18916 return createResultValue( result, "vk::Device::bindImageMemory" );
18917 }
18918#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18919
18920 void getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements ) const
18921 {
18922 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
18923 }
18924
18925#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18926 template <typename Allocator = std::allocator<SparseImageMemoryRequirements>>
18927 std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( Image image ) const
18928 {
18929 std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
18930 uint32_t sparseMemoryRequirementCount;
18931 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
18932 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
18933 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
18934 return sparseMemoryRequirements;
18935 }
18936#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18937
18938 Result createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
18939 {
18940 return static_cast<Result>( vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
18941 }
18942
18943#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18944 ResultValueType<Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
18945 {
18946 Fence fence;
18947 Result result = static_cast<Result>( vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkFence*>( &fence ) ) );
18948 return createResultValue( result, fence, "vk::Device::createFence" );
18949 }
18950#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18951
18952 void destroyFence( Fence fence, const AllocationCallbacks* pAllocator ) const
18953 {
18954 vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
18955 }
18956
18957#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18958 void destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator = nullptr ) const
18959 {
18960 vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
18961 }
18962#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18963
18964 Result resetFences( uint32_t fenceCount, const Fence* pFences ) const
18965 {
18966 return static_cast<Result>( vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
18967 }
18968
18969#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18970 ResultValueType<void>::type resetFences( ArrayProxy<const Fence> fences ) const
18971 {
18972 Result result = static_cast<Result>( vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
18973 return createResultValue( result, "vk::Device::resetFences" );
18974 }
18975#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18976
18977#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18978 Result getFenceStatus( Fence fence ) const
18979 {
18980 return static_cast<Result>( vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
18981 }
18982#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18983
18984#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18985 Result getFenceStatus( Fence fence ) const
18986 {
18987 Result result = static_cast<Result>( vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
18988 return createResultValue( result, "vk::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
18989 }
18990#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18991
18992 Result waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout ) const
18993 {
18994 return static_cast<Result>( vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), waitAll, timeout ) );
18995 }
18996
18997#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18998 Result waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout ) const
18999 {
19000 Result result = static_cast<Result>( vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), waitAll, timeout ) );
19001 return createResultValue( result, "vk::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
19002 }
19003#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
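    // Usage sketch for createFence / waitForFences / resetFences. Note that the enhanced-mode
    // waitForFences still returns a vk::Result, since Result::eTimeout is an expected outcome
    // rather than an error. Assumes a valid vk::Device `device`.
    //
    //   vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
    //   // ... submit work that signals `fence` ...
    //   vk::Result r = device.waitForFences( fence, VK_TRUE, UINT64_MAX );
    //   if ( r == vk::Result::eSuccess )
    //   {
    //     device.resetFences( fence );
    //   }
    //   device.destroyFence( fence );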
19004
19005 Result createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore ) const
19006 {
19007 return static_cast<Result>( vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
19008 }
19009
19010#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19011 ResultValueType<Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19012 {
19013 Semaphore semaphore;
19014 Result result = static_cast<Result>( vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
19015 return createResultValue( result, semaphore, "vk::Device::createSemaphore" );
19016 }
19017#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19018
19019 void destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator ) const
19020 {
19021 vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19022 }
19023
19024#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19025 void destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr ) const
19026 {
19027 vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19028 }
19029#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19030
19031 Result createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent ) const
19032 {
19033 return static_cast<Result>( vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
19034 }
19035
19036#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19037 ResultValueType<Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19038 {
19039 Event event;
19040 Result result = static_cast<Result>( vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkEvent*>( &event ) ) );
19041 return createResultValue( result, event, "vk::Device::createEvent" );
19042 }
19043#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19044
19045 void destroyEvent( Event event, const AllocationCallbacks* pAllocator ) const
19046 {
19047 vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19048 }
19049
19050#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19051 void destroyEvent( Event event, Optional<const AllocationCallbacks> allocator = nullptr ) const
19052 {
19053 vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19054 }
19055#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19056
19057#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19058 Result getEventStatus( Event event ) const
19059 {
19060 return static_cast<Result>( vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
19061 }
19062#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19063
19064#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19065 Result getEventStatus( Event event ) const
19066 {
19067 Result result = static_cast<Result>( vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
19068 return createResultValue( result, "vk::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
19069 }
19070#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19071
19072#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19073 Result setEvent( Event event ) const
19074 {
19075 return static_cast<Result>( vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
19076 }
19077#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19078
19079#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19080 ResultValueType<void>::type setEvent( Event event ) const
19081 {
19082 Result result = static_cast<Result>( vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
19083 return createResultValue( result, "vk::Device::setEvent" );
19084 }
19085#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19086
19087#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19088 Result resetEvent( Event event ) const
19089 {
19090 return static_cast<Result>( vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
19091 }
19092#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19093
19094#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19095 ResultValueType<void>::type resetEvent( Event event ) const
19096 {
19097 Result result = static_cast<Result>( vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
19098 return createResultValue( result, "vk::Device::resetEvent" );
19099 }
19100#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19101
19102 Result createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool ) const
19103 {
19104 return static_cast<Result>( vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
19105 }
19106
19107#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19108 ResultValueType<QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19109 {
19110 QueryPool queryPool;
19111 Result result = static_cast<Result>( vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
19112 return createResultValue( result, queryPool, "vk::Device::createQueryPool" );
19113 }
19114#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19115
19116 void destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator ) const
19117 {
19118 vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19119 }
19120
19121#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19122 void destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr ) const
19123 {
19124 vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19125 }
19126#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19127
19128 Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags ) const
19129 {
19130 return static_cast<Result>( vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, stride, static_cast<VkQueryResultFlags>( flags ) ) );
19131 }
19132
19133#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19134 template <typename T>
19135 Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags ) const
19136 {
19137 Result result = static_cast<Result>( vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), stride, static_cast<VkQueryResultFlags>( flags ) ) );
19138 return createResultValue( result, "vk::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
19139 }
19140#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19141
19142 Result createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer ) const
19143 {
19144 return static_cast<Result>( vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
19145 }
19146
19147#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19148 ResultValueType<Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19149 {
19150 Buffer buffer;
19151 Result result = static_cast<Result>( vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkBuffer*>( &buffer ) ) );
19152 return createResultValue( result, buffer, "vk::Device::createBuffer" );
19153 }
19154#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19155
19156 void destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator ) const
19157 {
19158 vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19159 }
19160
19161#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19162 void destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr ) const
19163 {
19164 vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19165 }
19166#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19167
19168 Result createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView ) const
19169 {
19170 return static_cast<Result>( vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
19171 }
19172
19173#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19174 ResultValueType<BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19175 {
19176 BufferView view;
19177 Result result = static_cast<Result>( vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkBufferView*>( &view ) ) );
19178 return createResultValue( result, view, "vk::Device::createBufferView" );
19179 }
19180#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19181
19182 void destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator ) const
19183 {
19184 vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19185 }
19186
19187#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19188 void destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr ) const
19189 {
19190 vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19191 }
19192#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19193
19194 Result createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage ) const
19195 {
19196 return static_cast<Result>( vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
19197 }
19198
19199#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19200 ResultValueType<Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19201 {
19202 Image image;
19203 Result result = static_cast<Result>( vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkImage*>( &image ) ) );
19204 return createResultValue( result, image, "vk::Device::createImage" );
19205 }
19206#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19207
19208 void destroyImage( Image image, const AllocationCallbacks* pAllocator ) const
19209 {
19210 vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19211 }
19212
19213#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19214 void destroyImage( Image image, Optional<const AllocationCallbacks> allocator = nullptr ) const
19215 {
19216 vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19217 }
19218#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19219
19220 void getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout ) const
19221 {
19222 vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
19223 }
19224
19225#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19226 SubresourceLayout getImageSubresourceLayout( Image image, const ImageSubresource & subresource ) const
19227 {
19228 SubresourceLayout layout;
19229 vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
19230 return layout;
19231 }
19232#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19233
19234 Result createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView ) const
19235 {
19236 return static_cast<Result>( vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
19237 }
19238
19239#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19240 ResultValueType<ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19241 {
19242 ImageView view;
19243 Result result = static_cast<Result>( vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkImageView*>( &view ) ) );
19244 return createResultValue( result, view, "vk::Device::createImageView" );
19245 }
19246#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19247
19248 void destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator ) const
19249 {
19250 vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19251 }
19252
19253#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19254 void destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr ) const
19255 {
19256 vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19257 }
19258#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19259
19260 Result createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule ) const
19261 {
19262 return static_cast<Result>( vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
19263 }
19264
19265#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19266 ResultValueType<ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19267 {
19268 ShaderModule shaderModule;
19269 Result result = static_cast<Result>( vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
19270 return createResultValue( result, shaderModule, "vk::Device::createShaderModule" );
19271 }
19272#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19273
19274 void destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator ) const
19275 {
19276 vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19277 }
19278
19279#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19280 void destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr ) const
19281 {
19282 vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19283 }
19284#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19285
19286 Result createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache ) const
19287 {
19288 return static_cast<Result>( vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
19289 }
19290
19291#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19292 ResultValueType<PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19293 {
19294 PipelineCache pipelineCache;
19295 Result result = static_cast<Result>( vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
19296 return createResultValue( result, pipelineCache, "vk::Device::createPipelineCache" );
19297 }
19298#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19299
19300 void destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator ) const
19301 {
19302 vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19303 }
19304
19305#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19306 void destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr ) const
19307 {
19308 vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19309 }
19310#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19311
19312 Result getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData ) const
19313 {
19314 return static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
19315 }
19316
19317#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19318 template <typename Allocator = std::allocator<uint8_t>>
19319 typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( PipelineCache pipelineCache ) const
19320 {
19321 std::vector<uint8_t,Allocator> data;
19322 size_t dataSize;
19323 Result result;
19324 do
19325 {
19326 result = static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
19327 if ( ( result == Result::eSuccess ) && dataSize )
19328 {
19329 data.resize( dataSize );
19330 result = static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
19331 }
19332 } while ( result == Result::eIncomplete );
19333 assert( dataSize <= data.size() );
19334 data.resize( dataSize );
19335 return createResultValue( result, data, "vk::Device::getPipelineCacheData" );
19336 }
19337#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
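    // Usage sketch for getPipelineCacheData: the enhanced-mode overload performs the usual
    // two-call size query internally and returns the blob as a std::vector<uint8_t>, which could
    // then be persisted and fed back through PipelineCacheCreateInfo on a later run. Assumes a
    // valid vk::Device `device` and vk::PipelineCache `cache`.
    //
    //   std::vector<uint8_t> blob = device.getPipelineCacheData( cache );
    //   vk::PipelineCache restored = device.createPipelineCache(
    //     vk::PipelineCacheCreateInfo( vk::PipelineCacheCreateFlags(), blob.size(), blob.data() ) );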
19338
19339 Result mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches ) const
19340 {
19341 return static_cast<Result>( vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
19342 }
19343
19344#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19345 ResultValueType<void>::type mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches ) const
19346 {
19347 Result result = static_cast<Result>( vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
19348 return createResultValue( result, "vk::Device::mergePipelineCaches" );
19349 }
19350#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19351
19352 Result createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const
19353 {
19354 return static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
19355 }
19356
19357#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19358 template <typename Allocator = std::allocator<Pipeline>>
19359 typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const
19360 {
19361 std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
19362 Result result = static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
19363 return createResultValue( result, pipelines, "vk::Device::createGraphicsPipelines" );
19364 }
19365
19366 ResultValueType<Pipeline>::type createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19367 {
19368 Pipeline pipeline;
19369 Result result = static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
19370 return createResultValue( result, pipeline, "vk::Device::createGraphicsPipeline" );
19371 }
19372#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
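    // Usage sketch for createGraphicsPipelines / createGraphicsPipeline: the ArrayProxy overload
    // returns one vk::Pipeline per create-info, while the singular helper covers the common
    // one-pipeline case. Assumes a fully populated vk::GraphicsPipelineCreateInfo `createInfo`
    // and a (possibly null) vk::PipelineCache `cache`.
    //
    //   vk::Pipeline pipeline = device.createGraphicsPipeline( cache, createInfo );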
19373
19374 Result createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const
19375 {
19376 return static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
19377 }
19378
19379#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19380 template <typename Allocator = std::allocator<Pipeline>>
19381 typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const
19382 {
19383 std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
19384 Result result = static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
19385 return createResultValue( result, pipelines, "vk::Device::createComputePipelines" );
19386 }
19387
19388 ResultValueType<Pipeline>::type createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19389 {
19390 Pipeline pipeline;
19391 Result result = static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
19392 return createResultValue( result, pipeline, "vk::Device::createComputePipeline" );
19393 }
19394#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19395
19396 void destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator ) const
19397 {
19398 vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19399 }
19400
19401#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19402 void destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr ) const
19403 {
19404 vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19405 }
19406#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19407
19408 Result createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout ) const
19409 {
19410 return static_cast<Result>( vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
19411 }
19412
19413#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19414 ResultValueType<PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19415 {
19416 PipelineLayout pipelineLayout;
19417 Result result = static_cast<Result>( vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
19418 return createResultValue( result, pipelineLayout, "vk::Device::createPipelineLayout" );
19419 }
19420#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19421
19422 void destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator ) const
19423 {
19424 vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19425 }
19426
19427#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19428 void destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const
19429 {
19430 vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19431 }
19432#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19433
19434 Result createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler ) const
19435 {
19436 return static_cast<Result>( vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
19437 }
19438
19439#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19440 ResultValueType<Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19441 {
19442 Sampler sampler;
19443 Result result = static_cast<Result>( vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSampler*>( &sampler ) ) );
19444 return createResultValue( result, sampler, "vk::Device::createSampler" );
19445 }
19446#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19447
19448 void destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator ) const
19449 {
19450 vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19451 }
19452
19453#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19454 void destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr ) const
19455 {
19456 vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19457 }
19458#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19459
19460 Result createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout ) const
19461 {
19462 return static_cast<Result>( vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
19463 }
19464
19465#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19466 ResultValueType<DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19467 {
19468 DescriptorSetLayout setLayout;
19469 Result result = static_cast<Result>( vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
19470 return createResultValue( result, setLayout, "vk::Device::createDescriptorSetLayout" );
19471 }
19472#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19473
19474 void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator ) const
19475 {
19476 vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19477 }
19478
19479#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19480 void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const
19481 {
19482 vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19483 }
19484#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19485
19486 Result createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool ) const
19487 {
19488 return static_cast<Result>( vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
19489 }
19490
19491#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19492 ResultValueType<DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19493 {
19494 DescriptorPool descriptorPool;
19495 Result result = static_cast<Result>( vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
19496 return createResultValue( result, descriptorPool, "vk::Device::createDescriptorPool" );
19497 }
19498#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19499
19500 void destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator ) const
19501 {
19502 vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19503 }
19504
19505#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19506 void destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr ) const
19507 {
19508 vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19509 }
19510#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19511
19512#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19513 Result resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags ) const
19514 {
19515 return static_cast<Result>( vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
19516 }
19517#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19518
19519#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19520 ResultValueType<void>::type resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags() ) const
19521 {
19522 Result result = static_cast<Result>( vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
19523 return createResultValue( result, "vk::Device::resetDescriptorPool" );
19524 }
19525#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19526
19527 Result allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets ) const
19528 {
19529 return static_cast<Result>( vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
19530 }
19531
19532#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19533 template <typename Allocator = std::allocator<DescriptorSet>>
19534 typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo ) const
19535 {
19536 std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
19537 Result result = static_cast<Result>( vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
19538 return createResultValue( result, descriptorSets, "vk::Device::allocateDescriptorSets" );
19539 }
19540#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19541
19542 Result freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets ) const
19543 {
19544 return static_cast<Result>( vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
19545 }
19546
19547#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19548 ResultValueType<void>::type freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets ) const
19549 {
19550 Result result = static_cast<Result>( vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
19551 return createResultValue( result, "vk::Device::freeDescriptorSets" );
19552 }
19553#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19554
19555 void updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies ) const
19556 {
19557 vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
19558 }
19559
19560#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19561 void updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies ) const
19562 {
19563 vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
19564 }
19565#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
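    // Usage sketch for allocateDescriptorSets / updateDescriptorSets.
    // Assumes a valid vk::Device `device`, a vk::DescriptorPool `pool`, a vk::DescriptorSetLayout
    // `layout`, and a uniform buffer `buffer` of `size` bytes.
    //
    //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets(
    //     vk::DescriptorSetAllocateInfo( pool, 1, &layout ) );
    //   vk::DescriptorBufferInfo bufferInfo( buffer, 0, size );
    //   vk::WriteDescriptorSet write( sets[0], 0, 0, 1, vk::DescriptorType::eUniformBuffer,
    //                                 nullptr, &bufferInfo, nullptr );
    //   device.updateDescriptorSets( write, nullptr );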
19566
19567 Result createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer ) const
19568 {
19569 return static_cast<Result>( vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
19570 }
19571
19572#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19573 ResultValueType<Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19574 {
19575 Framebuffer framebuffer;
19576 Result result = static_cast<Result>( vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
19577 return createResultValue( result, framebuffer, "vk::Device::createFramebuffer" );
19578 }
19579#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19580
19581 void destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator ) const
19582 {
19583 vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19584 }
19585
19586#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19587 void destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr ) const
19588 {
19589 vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19590 }
19591#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19592
19593 Result createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass ) const
19594 {
19595 return static_cast<Result>( vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
19596 }
19597
19598#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19599 ResultValueType<RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19600 {
19601 RenderPass renderPass;
19602 Result result = static_cast<Result>( vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
19603 return createResultValue( result, renderPass, "vk::Device::createRenderPass" );
19604 }
19605#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19606
19607 void destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator ) const
19608 {
19609 vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19610 }
19611
19612#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19613 void destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr ) const
19614 {
19615 vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19616 }
19617#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19618
19619 void getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity ) const
19620 {
19621 vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
19622 }
19623
19624#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19625 Extent2D getRenderAreaGranularity( RenderPass renderPass ) const
19626 {
19627 Extent2D granularity;
19628 vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
19629 return granularity;
19630 }
19631#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19632
19633 Result createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool ) const
19634 {
19635 return static_cast<Result>( vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
19636 }
19637
19638#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19639 ResultValueType<CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19640 {
19641 CommandPool commandPool;
19642 Result result = static_cast<Result>( vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
19643 return createResultValue( result, commandPool, "vk::Device::createCommandPool" );
19644 }
19645#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19646
19647 void destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator ) const
19648 {
19649 vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19650 }
19651
19652#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19653 void destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr ) const
19654 {
19655 vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19656 }
19657#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19658
19659#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19660 Result resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const
19661 {
19662 return static_cast<Result>( vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
19663 }
19664#endif /*!VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19665
19666#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19667 ResultValueType<void>::type resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const
19668 {
19669 Result result = static_cast<Result>( vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
19670 return createResultValue( result, "vk::Device::resetCommandPool" );
19671 }
19672#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19673
19674 Result allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers ) const
19675 {
19676 return static_cast<Result>( vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
19677 }
19678
19679#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19680 template <typename Allocator = std::allocator<CommandBuffer>>
19681 typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo ) const
19682 {
19683 std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
19684 Result result = static_cast<Result>( vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
19685 return createResultValue( result, commandBuffers, "vk::Device::allocateCommandBuffers" );
19686 }
19687#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19688
19689 void freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
19690 {
19691 vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
19692 }
19693
19694#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19695 void freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers ) const
19696 {
19697 vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
19698 }
19699#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
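    // Sketch of allocating and later freeing command buffers through the wrappers above
    // (assumes 'device' and 'commandPool' from the calling code):
    //   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
    //   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocInfo );
    //   device.freeCommandBuffers( commandPool, commandBuffers );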
19700
19701 Result createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains ) const
19702 {
19703 return static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
19704 }
19705
19706#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19707 template <typename Allocator = std::allocator<SwapchainKHR>>
19708 typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const
19709 {
19710 std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
19711 Result result = static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
19712 return createResultValue( result, swapchains, "vk::Device::createSharedSwapchainsKHR" );
19713 }
19714
19715 ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19716 {
19717 SwapchainKHR swapchain;
19718 Result result = static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
19719 return createResultValue( result, swapchain, "vk::Device::createSharedSwapchainKHR" );
19720 }
19721#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19722
19723 Result createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain ) const
19724 {
19725 return static_cast<Result>( vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
19726 }
19727
19728#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19729 ResultValueType<SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19730 {
19731 SwapchainKHR swapchain;
19732 Result result = static_cast<Result>( vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
19733 return createResultValue( result, swapchain, "vk::Device::createSwapchainKHR" );
19734 }
19735#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
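    // Rough swapchain-creation sketch for the wrapper above; 'surface', 'surfaceFormat', 'extent'
    // and 'presentMode' are assumed to come from the surrounding application code (see the surface
    // queries on vk::PhysicalDevice), and the remaining fields are left at their defaults:
    //   vk::SwapchainCreateInfoKHR swapchainInfo;
    //   swapchainInfo.surface          = surface;
    //   swapchainInfo.minImageCount    = 2;
    //   swapchainInfo.imageFormat      = surfaceFormat.format;
    //   swapchainInfo.imageColorSpace  = surfaceFormat.colorSpace;
    //   swapchainInfo.imageExtent      = extent;
    //   swapchainInfo.imageArrayLayers = 1;
    //   swapchainInfo.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment;
    //   swapchainInfo.presentMode      = presentMode;
    //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );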
19736
19737 void destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator ) const
19738 {
19739 vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19740 }
19741
19742#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19743 void destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr ) const
19744 {
19745 vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19746 }
19747#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19748
19749 Result getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages ) const
19750 {
19751 return static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
19752 }
19753
19754#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19755 template <typename Allocator = std::allocator<Image>>
19756 typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( SwapchainKHR swapchain ) const
19757 {
19758 std::vector<Image,Allocator> swapchainImages;
19759 uint32_t swapchainImageCount;
19760 Result result;
19761 do
19762 {
19763 result = static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
19764 if ( ( result == Result::eSuccess ) && swapchainImageCount )
19765 {
19766 swapchainImages.resize( swapchainImageCount );
19767 result = static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
19768 }
19769 } while ( result == Result::eIncomplete );
19770 assert( swapchainImageCount <= swapchainImages.size() );
19771 swapchainImages.resize( swapchainImageCount );
19772 return createResultValue( result, swapchainImages, "vk::Device::getSwapchainImagesKHR" );
19773 }
19774#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19775
19776 Result acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex ) const
19777 {
19778 return static_cast<Result>( vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
19779 }
19780
19781#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19782 ResultValue<uint32_t> acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence ) const
19783 {
19784 uint32_t imageIndex;
19785 Result result = static_cast<Result>( vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
19786 return createResultValue( result, imageIndex, "vk::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
19787 }
19788#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
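    // Per-frame acquisition sketch using the two wrappers above. 'device', 'swapchain' and
    // 'imageAvailableSemaphore' are assumptions from the calling code; acquireNextImageKHR returns
    // a ResultValue because several non-error result codes (eTimeout, eNotReady, eSuboptimalKHR)
    // are possible:
    //   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );
    //   vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore, vk::Fence() );
    //   if ( ( acquired.result == vk::Result::eSuccess ) || ( acquired.result == vk::Result::eSuboptimalKHR ) )
    //   {
    //     uint32_t imageIndex = acquired.value;
    //     // record and submit work that renders to swapchainImages[imageIndex] ...
    //   }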
19789
19790 Result debugMarkerSetObjectNameEXT( DebugMarkerObjectNameInfoEXT* pNameInfo ) const
19791 {
19792 return static_cast<Result>( vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
19793 }
19794
19795#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19796    ResultValueType<void>::type debugMarkerSetObjectNameEXT( DebugMarkerObjectNameInfoEXT & nameInfo ) const
19797    {
19798      // nameInfo is an input here; the C API takes a non-const pointer, so it is passed by reference.
19799      Result result = static_cast<Result>( vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
19800      return createResultValue( result, "vk::Device::debugMarkerSetObjectNameEXT" );
19801 }
19802#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19803
19804 Result debugMarkerSetObjectTagEXT( DebugMarkerObjectTagInfoEXT* pTagInfo ) const
19805 {
19806 return static_cast<Result>( vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
19807 }
19808
19809#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19810    ResultValueType<void>::type debugMarkerSetObjectTagEXT( DebugMarkerObjectTagInfoEXT & tagInfo ) const
19811    {
19812      // tagInfo is an input here; the C API takes a non-const pointer, so it is passed by reference.
19813      Result result = static_cast<Result>( vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
19814      return createResultValue( result, "vk::Device::debugMarkerSetObjectTagEXT" );
19815 }
19816#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19817
19818#ifdef VK_USE_PLATFORM_WIN32_KHR
19819 Result getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const
19820 {
19821 return static_cast<Result>( vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
19822 }
19823#endif /*VK_USE_PLATFORM_WIN32_KHR*/
19824
19825#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19826#ifdef VK_USE_PLATFORM_WIN32_KHR
19827 ResultValueType<HANDLE>::type getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType ) const
19828 {
19829 HANDLE handle;
19830 Result result = static_cast<Result>( vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
19831 return createResultValue( result, handle, "vk::Device::getMemoryWin32HandleNV" );
19832 }
19833#endif /*VK_USE_PLATFORM_WIN32_KHR*/
19834#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19835
19836 Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const
19837 {
19838 return static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
19839 }
19840
19841#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19842 ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19843 {
19844 IndirectCommandsLayoutNVX indirectCommandsLayout;
19845 Result result = static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
19846 return createResultValue( result, indirectCommandsLayout, "vk::Device::createIndirectCommandsLayoutNVX" );
19847 }
19848#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19849
19850 void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator ) const
19851 {
19852 vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19853 }
19854
19855#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19856 void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const
19857 {
19858 vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19859 }
19860#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19861
19862 Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable ) const
19863 {
19864 return static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
19865 }
19866
19867#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19868 ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
19869 {
19870 ObjectTableNVX objectTable;
19871 Result result = static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
19872 return createResultValue( result, objectTable, "vk::Device::createObjectTableNVX" );
19873 }
19874#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19875
19876 void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator ) const
19877 {
19878 vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19879 }
19880
19881#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19882 void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr ) const
19883 {
19884 vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19885 }
19886#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19887
19888 Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const
19889 {
19890 return static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
19891 }
19892
19893#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19894 ResultValueType<void>::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices ) const
19895 {
19896#ifdef VULKAN_HPP_NO_EXCEPTIONS
19897 assert( pObjectTableEntries.size() == objectIndices.size() );
19898#else
19899 if ( pObjectTableEntries.size() != objectIndices.size() )
19900 {
19901 throw std::logic_error( "vk::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
19902 }
19903#endif // VULKAN_HPP_NO_EXCEPTIONS
19904 Result result = static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
19905 return createResultValue( result, "vk::Device::registerObjectsNVX" );
19906 }
19907#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19908
19909 Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const
19910 {
19911 return static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
19912 }
19913
19914#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19915 ResultValueType<void>::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices ) const
19916 {
19917#ifdef VULKAN_HPP_NO_EXCEPTIONS
19918 assert( objectEntryTypes.size() == objectIndices.size() );
19919#else
19920 if ( objectEntryTypes.size() != objectIndices.size() )
19921 {
19922 throw std::logic_error( "vk::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
19923 }
19924#endif // VULKAN_HPP_NO_EXCEPTIONS
19925 Result result = static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
19926 return createResultValue( result, "vk::Device::unregisterObjectsNVX" );
19927 }
19928#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19929
19930#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19931 void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlagsKHR flags ) const
19932 {
19933 vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlagsKHR>( flags ) );
19934 }
19935#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19936
19937#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19938 void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlagsKHR flags = CommandPoolTrimFlagsKHR() ) const
19939 {
19940 vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlagsKHR>( flags ) );
19941 }
19942#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19943
19944 Result displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo ) const
19945 {
19946 return static_cast<Result>( vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
19947 }
19948
19949#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19950 ResultValueType<void>::type displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo ) const
19951 {
19952 Result result = static_cast<Result>( vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
19953 return createResultValue( result, "vk::Device::displayPowerControlEXT" );
19954 }
19955#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19956
19957 Result registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
19958 {
19959 return static_cast<Result>( vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
19960 }
19961
19962#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19963 ResultValueType<Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, const AllocationCallbacks & allocator ) const
19964 {
19965 Fence fence;
19966 Result result = static_cast<Result>( vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( &allocator ), reinterpret_cast<VkFence*>( &fence ) ) );
19967 return createResultValue( result, fence, "vk::Device::registerEventEXT" );
19968 }
19969#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19970
19971 Result registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
19972 {
19973 return static_cast<Result>( vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
19974 }
19975
19976#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19977 ResultValueType<Fence>::type registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, const AllocationCallbacks & allocator ) const
19978 {
19979 Fence fence;
19980 Result result = static_cast<Result>( vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( &allocator ), reinterpret_cast<VkFence*>( &fence ) ) );
19981 return createResultValue( result, fence, "vk::Device::registerDisplayEventEXT" );
19982 }
19983#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19984
19985 Result getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const
19986 {
19987 return static_cast<Result>( vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
19988 }
19989
19990#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19991 ResultValue<uint64_t> getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter ) const
19992 {
19993 uint64_t counterValue;
19994 Result result = static_cast<Result>( vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
19995 return createResultValue( result, counterValue, "vk::Device::getSwapchainCounterEXT", { Result::eSuccess, Result::eErrorDeviceLost, Result::eErrorOutOfDateKHR } );
19996 }
19997#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19998
19999#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
20000 explicit
20001#endif
20002 operator VkDevice() const
20003 {
20004 return m_device;
20005 }
20006
20007 explicit operator bool() const
20008 {
20009 return m_device != VK_NULL_HANDLE;
20010 }
20011
20012 bool operator!() const
20013 {
20014 return m_device == VK_NULL_HANDLE;
20015 }
20016
20017 private:
20018 VkDevice m_device;
20019 };
20020 static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
20021
20022 class PhysicalDevice
20023 {
20024 public:
20025 PhysicalDevice()
20026 : m_physicalDevice(VK_NULL_HANDLE)
20027 {}
20028
20029#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
20030 PhysicalDevice(VkPhysicalDevice physicalDevice)
20031 : m_physicalDevice(physicalDevice)
20032 {}
20033
20034 PhysicalDevice& operator=(VkPhysicalDevice physicalDevice)
20035 {
20036 m_physicalDevice = physicalDevice;
20037 return *this;
20038 }
20039#endif
20040
20041 bool operator==(PhysicalDevice const &rhs) const
20042 {
20043 return m_physicalDevice == rhs.m_physicalDevice;
20044 }
20045
20046 bool operator!=(PhysicalDevice const &rhs) const
20047 {
20048 return m_physicalDevice != rhs.m_physicalDevice;
20049 }
20050
20051 bool operator<(PhysicalDevice const &rhs) const
20052 {
20053 return m_physicalDevice < rhs.m_physicalDevice;
20054 }
20055
20056 void getProperties( PhysicalDeviceProperties* pProperties ) const
20057 {
20058 vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
20059 }
20060
20061#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20062 PhysicalDeviceProperties getProperties() const
20063 {
20064 PhysicalDeviceProperties properties;
20065 vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
20066 return properties;
20067 }
20068#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20069
20070 void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties ) const
20071 {
20072 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
20073 }
20074
20075#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20076 template <typename Allocator = std::allocator<QueueFamilyProperties>>
20077 std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties() const
20078 {
20079 std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
20080 uint32_t queueFamilyPropertyCount;
20081 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
20082 queueFamilyProperties.resize( queueFamilyPropertyCount );
20083 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
20084 return queueFamilyProperties;
20085 }
20086#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
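    // Sketch of picking a graphics-capable queue family with the wrapper above
    // (assumes 'physicalDevice' from the calling code):
    //   std::vector<vk::QueueFamilyProperties> queueFamilies = physicalDevice.getQueueFamilyProperties();
    //   uint32_t graphicsQueueFamilyIndex = 0;
    //   for ( size_t i = 0; i < queueFamilies.size(); i++ )
    //   {
    //     if ( queueFamilies[i].queueFlags & vk::QueueFlagBits::eGraphics )
    //     {
    //       graphicsQueueFamilyIndex = static_cast<uint32_t>( i );
    //       break;
    //     }
    //   }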
20087
20088 void getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties ) const
20089 {
20090 vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
20091 }
20092
20093#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20094 PhysicalDeviceMemoryProperties getMemoryProperties() const
20095 {
20096 PhysicalDeviceMemoryProperties memoryProperties;
20097 vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
20098 return memoryProperties;
20099 }
20100#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20101
20102 void getFeatures( PhysicalDeviceFeatures* pFeatures ) const
20103 {
20104 vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
20105 }
20106
20107#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20108 PhysicalDeviceFeatures getFeatures() const
20109 {
20110 PhysicalDeviceFeatures features;
20111 vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
20112 return features;
20113 }
20114#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20115
20116 void getFormatProperties( Format format, FormatProperties* pFormatProperties ) const
20117 {
20118 vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
20119 }
20120
20121#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20122 FormatProperties getFormatProperties( Format format ) const
20123 {
20124 FormatProperties formatProperties;
20125 vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
20126 return formatProperties;
20127 }
20128#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20129
20130 Result getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties ) const
20131 {
20132 return static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
20133 }
20134
20135#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20136 ResultValueType<ImageFormatProperties>::type getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags ) const
20137 {
20138 ImageFormatProperties imageFormatProperties;
20139 Result result = static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
20140 return createResultValue( result, imageFormatProperties, "vk::PhysicalDevice::getImageFormatProperties" );
20141 }
20142#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
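    // Sketch: checking support for a sampled, optimally tiled 2D RGBA8 image before creating one
    // (assumes 'physicalDevice'; an unsupported combination is reported through the normal
    // error path of createResultValue):
    //   vk::ImageFormatProperties imageFormatProperties = physicalDevice.getImageFormatProperties(
    //     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
    //     vk::ImageUsageFlagBits::eSampled, vk::ImageCreateFlags() );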
20143
20144 Result createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice ) const
20145 {
20146 return static_cast<Result>( vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
20147 }
20148
20149#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20150 ResultValueType<Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20151 {
20152 Device device;
20153 Result result = static_cast<Result>( vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDevice*>( &device ) ) );
20154 return createResultValue( result, device, "vk::PhysicalDevice::createDevice" );
20155 }
20156#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
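    // Device-creation sketch using the wrapper above; 'physicalDevice' and
    // 'graphicsQueueFamilyIndex' are assumptions from the calling code, and only one queue is
    // requested for brevity:
    //   float queuePriority = 1.0f;
    //   vk::DeviceQueueCreateInfo queueInfo( vk::DeviceQueueCreateFlags(), graphicsQueueFamilyIndex, 1, &queuePriority );
    //   vk::DeviceCreateInfo deviceInfo;
    //   deviceInfo.queueCreateInfoCount = 1;
    //   deviceInfo.pQueueCreateInfos    = &queueInfo;
    //   vk::Device device = physicalDevice.createDevice( deviceInfo );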
20157
20158 Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties ) const
20159 {
20160 return static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
20161 }
20162
20163#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20164 template <typename Allocator = std::allocator<LayerProperties>>
20165 typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties() const
20166 {
20167 std::vector<LayerProperties,Allocator> properties;
20168 uint32_t propertyCount;
20169 Result result;
20170 do
20171 {
20172 result = static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
20173 if ( ( result == Result::eSuccess ) && propertyCount )
20174 {
20175 properties.resize( propertyCount );
20176 result = static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
20177 }
20178 } while ( result == Result::eIncomplete );
20179 assert( propertyCount <= properties.size() );
20180 properties.resize( propertyCount );
20181 return createResultValue( result, properties, "vk::PhysicalDevice::enumerateDeviceLayerProperties" );
20182 }
20183#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20184
20185 Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties ) const
20186 {
20187 return static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
20188 }
20189
20190#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20191 template <typename Allocator = std::allocator<ExtensionProperties>>
20192 typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr ) const
20193 {
20194 std::vector<ExtensionProperties,Allocator> properties;
20195 uint32_t propertyCount;
20196 Result result;
20197 do
20198 {
20199 result = static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
20200 if ( ( result == Result::eSuccess ) && propertyCount )
20201 {
20202 properties.resize( propertyCount );
20203 result = static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
20204 }
20205 } while ( result == Result::eIncomplete );
20206 assert( propertyCount <= properties.size() );
20207 properties.resize( propertyCount );
20208 return createResultValue( result, properties, "vk::PhysicalDevice::enumerateDeviceExtensionProperties" );
20209 }
20210#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
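    // Sketch: testing whether the device exposes VK_KHR_swapchain via the wrapper above
    // (assumes 'physicalDevice'; strcmp comes from <cstring>, which this header already includes):
    //   bool swapchainSupported = false;
    //   std::vector<vk::ExtensionProperties> extensions = physicalDevice.enumerateDeviceExtensionProperties();
    //   for ( const vk::ExtensionProperties & extension : extensions )
    //   {
    //     if ( strcmp( extension.extensionName, VK_KHR_SWAPCHAIN_EXTENSION_NAME ) == 0 )
    //     {
    //       swapchainSupported = true;
    //       break;
    //     }
    //   }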
20211
20212 void getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties ) const
20213 {
20214 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
20215 }
20216
20217#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20218 template <typename Allocator = std::allocator<SparseImageFormatProperties>>
20219 std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling ) const
20220 {
20221 std::vector<SparseImageFormatProperties,Allocator> properties;
20222 uint32_t propertyCount;
20223 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
20224 properties.resize( propertyCount );
20225 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
20226 return properties;
20227 }
20228#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20229
20230 Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties ) const
20231 {
20232 return static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
20233 }
20234
20235#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20236 template <typename Allocator = std::allocator<DisplayPropertiesKHR>>
20237 typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR() const
20238 {
20239 std::vector<DisplayPropertiesKHR,Allocator> properties;
20240 uint32_t propertyCount;
20241 Result result;
20242 do
20243 {
20244 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
20245 if ( ( result == Result::eSuccess ) && propertyCount )
20246 {
20247 properties.resize( propertyCount );
20248 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
20249 }
20250 } while ( result == Result::eIncomplete );
20251 assert( propertyCount <= properties.size() );
20252 properties.resize( propertyCount );
20253 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayPropertiesKHR" );
20254 }
20255#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20256
20257 Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties ) const
20258 {
20259 return static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
20260 }
20261
20262#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20263 template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>>
20264 typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR() const
20265 {
20266 std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
20267 uint32_t propertyCount;
20268 Result result;
20269 do
20270 {
20271 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
20272 if ( ( result == Result::eSuccess ) && propertyCount )
20273 {
20274 properties.resize( propertyCount );
20275 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
20276 }
20277 } while ( result == Result::eIncomplete );
20278 assert( propertyCount <= properties.size() );
20279 properties.resize( propertyCount );
20280 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayPlanePropertiesKHR" );
20281 }
20282#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20283
20284 Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays ) const
20285 {
20286 return static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
20287 }
20288
20289#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20290 template <typename Allocator = std::allocator<DisplayKHR>>
20291 typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
20292 {
20293 std::vector<DisplayKHR,Allocator> displays;
20294 uint32_t displayCount;
20295 Result result;
20296 do
20297 {
20298 result = static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
20299 if ( ( result == Result::eSuccess ) && displayCount )
20300 {
20301 displays.resize( displayCount );
20302 result = static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
20303 }
20304 } while ( result == Result::eIncomplete );
20305 assert( displayCount <= displays.size() );
20306 displays.resize( displayCount );
20307 return createResultValue( result, displays, "vk::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
20308 }
20309#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20310
20311 Result getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties ) const
20312 {
20313 return static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
20314 }
20315
20316#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20317 template <typename Allocator = std::allocator<DisplayModePropertiesKHR>>
20318 typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( DisplayKHR display ) const
20319 {
20320 std::vector<DisplayModePropertiesKHR,Allocator> properties;
20321 uint32_t propertyCount;
20322 Result result;
20323 do
20324 {
20325 result = static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
20326 if ( ( result == Result::eSuccess ) && propertyCount )
20327 {
20328 properties.resize( propertyCount );
20329 result = static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
20330 }
20331 } while ( result == Result::eIncomplete );
20332 assert( propertyCount <= properties.size() );
20333 properties.resize( propertyCount );
20334 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayModePropertiesKHR" );
20335 }
20336#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20337
20338 Result createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode ) const
20339 {
20340 return static_cast<Result>( vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
20341 }
20342
20343#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20344 ResultValueType<DisplayModeKHR>::type createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20345 {
20346 DisplayModeKHR mode;
20347 Result result = static_cast<Result>( vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
20348 return createResultValue( result, mode, "vk::PhysicalDevice::createDisplayModeKHR" );
20349 }
20350#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20351
20352 Result getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities ) const
20353 {
20354 return static_cast<Result>( vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
20355 }
20356
20357#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20358 ResultValueType<DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex ) const
20359 {
20360 DisplayPlaneCapabilitiesKHR capabilities;
20361 Result result = static_cast<Result>( vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
20362 return createResultValue( result, capabilities, "vk::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
20363 }
20364#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20365
20366#ifdef VK_USE_PLATFORM_MIR_KHR
20367 Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection ) const
20368 {
20369 return vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection );
20370 }
20371#endif /*VK_USE_PLATFORM_MIR_KHR*/
20372
20373#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20374#ifdef VK_USE_PLATFORM_MIR_KHR
20375 Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection ) const
20376 {
20377 return vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection );
20378 }
20379#endif /*VK_USE_PLATFORM_MIR_KHR*/
20380#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20381
20382 Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported ) const
20383 {
20384 return static_cast<Result>( vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), pSupported ) );
20385 }
20386
20387#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20388 ResultValueType<Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface ) const
20389 {
20390 Bool32 supported;
20391 Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), &supported ) );
20392 return createResultValue( result, supported, "vk::PhysicalDevice::getSurfaceSupportKHR" );
20393 }
20394#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
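    // Sketch: confirming that the chosen queue family can present to a surface
    // (assumes 'physicalDevice', 'surface' and 'graphicsQueueFamilyIndex' from the calling code):
    //   vk::Bool32 presentSupported = physicalDevice.getSurfaceSupportKHR( graphicsQueueFamilyIndex, surface );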
20395
20396 Result getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities ) const
20397 {
20398 return static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
20399 }
20400
20401#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20402 ResultValueType<SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( SurfaceKHR surface ) const
20403 {
20404 SurfaceCapabilitiesKHR surfaceCapabilities;
20405 Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
20406 return createResultValue( result, surfaceCapabilities, "vk::PhysicalDevice::getSurfaceCapabilitiesKHR" );
20407 }
20408#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20409
20410 Result getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats ) const
20411 {
20412 return static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
20413 }
20414
20415#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20416 template <typename Allocator = std::allocator<SurfaceFormatKHR>>
20417 typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( SurfaceKHR surface ) const
20418 {
20419 std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
20420 uint32_t surfaceFormatCount;
20421 Result result;
20422 do
20423 {
20424 result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
20425 if ( ( result == Result::eSuccess ) && surfaceFormatCount )
20426 {
20427 surfaceFormats.resize( surfaceFormatCount );
20428 result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
20429 }
20430 } while ( result == Result::eIncomplete );
20431 assert( surfaceFormatCount <= surfaceFormats.size() );
20432 surfaceFormats.resize( surfaceFormatCount );
20433 return createResultValue( result, surfaceFormats, "vk::PhysicalDevice::getSurfaceFormatsKHR" );
20434 }
20435#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20436
20437 Result getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes ) const
20438 {
20439 return static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
20440 }
20441
20442#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20443 template <typename Allocator = std::allocator<PresentModeKHR>>
20444 typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( SurfaceKHR surface ) const
20445 {
20446 std::vector<PresentModeKHR,Allocator> presentModes;
20447 uint32_t presentModeCount;
20448 Result result;
20449 do
20450 {
20451 result = static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
20452 if ( ( result == Result::eSuccess ) && presentModeCount )
20453 {
20454 presentModes.resize( presentModeCount );
20455 result = static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
20456 }
20457 } while ( result == Result::eIncomplete );
20458 assert( presentModeCount <= presentModes.size() );
20459 presentModes.resize( presentModeCount );
20460 return createResultValue( result, presentModes, "vk::PhysicalDevice::getSurfacePresentModesKHR" );
20461 }
20462#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
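    // The usual pre-swapchain query sequence, combining the capability, format and present-mode
    // wrappers above (assumes 'physicalDevice' and 'surface' from the calling code):
    //   vk::SurfaceCapabilitiesKHR surfaceCaps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
    //   std::vector<vk::SurfaceFormatKHR> surfaceFormats = physicalDevice.getSurfaceFormatsKHR( surface );
    //   std::vector<vk::PresentModeKHR> presentModes = physicalDevice.getSurfacePresentModesKHR( surface );
    //   // choose minImageCount, a format and a present mode from these before filling vk::SwapchainCreateInfoKHR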
20463
20464#ifdef VK_USE_PLATFORM_WAYLAND_KHR
20465 Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display ) const
20466 {
20467 return vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display );
20468 }
20469#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
20470
20471#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20472#ifdef VK_USE_PLATFORM_WAYLAND_KHR
20473 Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const
20474 {
20475 return vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
20476 }
20477#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
20478#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20479
20480#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
20481#ifdef VK_USE_PLATFORM_WIN32_KHR
20482 Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const
20483 {
20484 return vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
20485 }
20486#endif /*VK_USE_PLATFORM_WIN32_KHR*/
20487#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20488
20489#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20490#ifdef VK_USE_PLATFORM_WIN32_KHR
20491 Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const
20492 {
20493 return vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
20494 }
20495#endif /*VK_USE_PLATFORM_WIN32_KHR*/
20496#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20497
20498#ifdef VK_USE_PLATFORM_XLIB_KHR
20499 Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const
20500 {
20501 return vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID );
20502 }
20503#endif /*VK_USE_PLATFORM_XLIB_KHR*/
20504
20505#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20506#ifdef VK_USE_PLATFORM_XLIB_KHR
20507 Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const
20508 {
20509 return vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
20510 }
20511#endif /*VK_USE_PLATFORM_XLIB_KHR*/
20512#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20513
20514#ifdef VK_USE_PLATFORM_XCB_KHR
20515 Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const
20516 {
20517 return vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id );
20518 }
20519#endif /*VK_USE_PLATFORM_XCB_KHR*/
20520
20521#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20522#ifdef VK_USE_PLATFORM_XCB_KHR
20523 Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const
20524 {
20525 return vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
20526 }
20527#endif /*VK_USE_PLATFORM_XCB_KHR*/
20528#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20529
20530 Result getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const
20531 {
20532 return static_cast<Result>( vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
20533 }
20534
20535#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20536 ResultValueType<ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
20537 {
20538 ExternalImageFormatPropertiesNV externalImageFormatProperties;
20539 Result result = static_cast<Result>( vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
20540 return createResultValue( result, externalImageFormatProperties, "vk::PhysicalDevice::getExternalImageFormatPropertiesNV" );
20541 }
20542#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20543
20544 void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits ) const
20545 {
20546 vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
20547 }
20548
20549#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20550 void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, DeviceGeneratedCommandsLimitsNVX & limits ) const
20551 {
20552 vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
20553 }
20554#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20555
20556 void getFeatures2KHR( PhysicalDeviceFeatures2KHR* pFeatures ) const
20557 {
20558 vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2KHR*>( pFeatures ) );
20559 }
20560
20561#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20562 PhysicalDeviceFeatures2KHR getFeatures2KHR() const
20563 {
20564 PhysicalDeviceFeatures2KHR features;
20565 vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2KHR*>( &features ) );
20566 return features;
20567 }
20568#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
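    // Sketch: reading extended feature support through the KHR wrapper above (assumes
    // 'physicalDevice'; chaining extension structures through pNext is omitted here):
    //   vk::PhysicalDeviceFeatures2KHR features2 = physicalDevice.getFeatures2KHR();
    //   vk::Bool32 geometryShaderSupported = features2.features.geometryShader;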
20569
20570 void getProperties2KHR( PhysicalDeviceProperties2KHR* pProperties ) const
20571 {
20572 vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2KHR*>( pProperties ) );
20573 }
20574
20575#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20576 PhysicalDeviceProperties2KHR getProperties2KHR() const
20577 {
20578 PhysicalDeviceProperties2KHR properties;
20579 vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2KHR*>( &properties ) );
20580 return properties;
20581 }
20582#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20583
20584 void getFormatProperties2KHR( Format format, FormatProperties2KHR* pFormatProperties ) const
20585 {
20586 vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2KHR*>( pFormatProperties ) );
20587 }
20588
20589#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20590 FormatProperties2KHR getFormatProperties2KHR( Format format ) const
20591 {
20592 FormatProperties2KHR formatProperties;
20593 vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2KHR*>( &formatProperties ) );
20594 return formatProperties;
20595 }
20596#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20597
20598 Result getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, ImageFormatProperties2KHR* pImageFormatProperties ) const
20599 {
20600 return static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2KHR*>( pImageFormatProperties ) ) );
20601 }
20602
20603#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20604 ResultValueType<ImageFormatProperties2KHR>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR & imageFormatInfo ) const
20605 {
20606 ImageFormatProperties2KHR imageFormatProperties;
20607 Result result = static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2KHR*>( &imageFormatProperties ) ) );
20608 return createResultValue( result, imageFormatProperties, "vk::PhysicalDevice::getImageFormatProperties2KHR" );
20609 }
20610#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20611
20612 void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2KHR* pQueueFamilyProperties ) const
20613 {
20614 vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2KHR*>( pQueueFamilyProperties ) );
20615 }
20616
20617#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20618 template <typename Allocator = std::allocator<QueueFamilyProperties2KHR>>
20619 std::vector<QueueFamilyProperties2KHR,Allocator> getQueueFamilyProperties2KHR() const
20620 {
20621 std::vector<QueueFamilyProperties2KHR,Allocator> queueFamilyProperties;
20622 uint32_t queueFamilyPropertyCount;
20623 vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
20624 queueFamilyProperties.resize( queueFamilyPropertyCount );
20625 vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2KHR*>( queueFamilyProperties.data() ) );
20626 return queueFamilyProperties;
20627 }
20628#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20629
20630 void getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2KHR* pMemoryProperties ) const
20631 {
20632 vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2KHR*>( pMemoryProperties ) );
20633 }
20634
20635#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20636 PhysicalDeviceMemoryProperties2KHR getMemoryProperties2KHR() const
20637 {
20638 PhysicalDeviceMemoryProperties2KHR memoryProperties;
20639 vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2KHR*>( &memoryProperties ) );
20640 return memoryProperties;
20641 }
20642#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20643
20644 void getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2KHR* pProperties ) const
20645 {
20646 vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2KHR*>( pProperties ) );
20647 }
20648
20649#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20650 template <typename Allocator = std::allocator<SparseImageFormatProperties2KHR>>
20651 std::vector<SparseImageFormatProperties2KHR,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR & formatInfo ) const
20652 {
20653 std::vector<SparseImageFormatProperties2KHR,Allocator> properties;
20654 uint32_t propertyCount;
20655 vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( &formatInfo ), &propertyCount, nullptr );
20656 properties.resize( propertyCount );
20657 vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2KHR*>( properties.data() ) );
20658 return properties;
20659 }
20660#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20661
20662#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
20663 Result releaseDisplayEXT( DisplayKHR display ) const
20664 {
20665 return static_cast<Result>( vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
20666 }
20667#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20668
20669#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20670 ResultValueType<void>::type releaseDisplayEXT( DisplayKHR display ) const
20671 {
20672 Result result = static_cast<Result>( vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
20673 return createResultValue( result, "vk::PhysicalDevice::releaseDisplayEXT" );
20674 }
20675#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20676
20677#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
20678 Result acquireXlibDisplayEXT( Display* dpy, DisplayKHR display ) const
20679 {
20680 return static_cast<Result>( vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
20681 }
20682#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
20683
20684#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20685#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
20686    ResultValueType<void>::type acquireXlibDisplayEXT( Display & dpy, DisplayKHR display ) const
20687    {
20689      Result result = static_cast<Result>( vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
20690      return createResultValue( result, "vk::PhysicalDevice::acquireXlibDisplayEXT" );
20691    }
20692#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
20693#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20694
20695#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
20696 Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay ) const
20697 {
20698 return static_cast<Result>( vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
20699 }
20700#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
20701
20702#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20703#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
20704 ResultValueType<void>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, DisplayKHR & display ) const
20705 {
20706 Result result = static_cast<Result>( vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
20707 return createResultValue( result, "vk::PhysicalDevice::getRandROutputDisplayEXT" );
20708 }
20709#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
20710#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20711
20712 Result getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities ) const
20713 {
20714 return static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
20715 }
20716
20717#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20718 ResultValueType<SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( SurfaceKHR surface ) const
20719 {
20720 SurfaceCapabilities2EXT surfaceCapabilities;
20721 Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
20722 return createResultValue( result, surfaceCapabilities, "vk::PhysicalDevice::getSurfaceCapabilities2EXT" );
20723 }
20724#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20725
Lenny Komowb0a17f22016-08-11 11:23:15 -060020726#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
20727 explicit
20728#endif
20729 operator VkPhysicalDevice() const
20730 {
20731 return m_physicalDevice;
20732 }
20733
20734 explicit operator bool() const
20735 {
20736 return m_physicalDevice != VK_NULL_HANDLE;
20737 }
20738
20739 bool operator!() const
20740 {
20741 return m_physicalDevice == VK_NULL_HANDLE;
20742 }
20743
20744 private:
20745 VkPhysicalDevice m_physicalDevice;
20746 };
20747 static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
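  // Usage sketch (illustrative): PhysicalDevice is a non-owning wrapper around
  // VkPhysicalDevice, so it can be tested for validity and handed back to C entry points
  // where no C++ wrapper is desired.
  //
  //   vk::PhysicalDevice gpu = ...;                         // e.g. from Instance::enumeratePhysicalDevices
  //   if ( gpu )                                            // false for VK_NULL_HANDLE
  //   {
  //     VkPhysicalDeviceFeatures features;
  //     vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( gpu ), &features );
  //   }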
20748
20749 class Instance
20750 {
20751 public:
20752 Instance()
20753 : m_instance(VK_NULL_HANDLE)
20754 {}
20755
20756#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
20757 Instance(VkInstance instance)
20758 : m_instance(instance)
20759 {}
20760
20761 Instance& operator=(VkInstance instance)
20762 {
20763 m_instance = instance;
20764 return *this;
20765 }
20766#endif
20767
Lenny Komow339ffcd2016-08-26 14:10:08 -060020768 bool operator==(Instance const &rhs) const
20769 {
20770 return m_instance == rhs.m_instance;
20771 }
20772
20773 bool operator!=(Instance const &rhs) const
20774 {
20775 return m_instance != rhs.m_instance;
20776 }
20777
20778 bool operator<(Instance const &rhs) const
20779 {
20780 return m_instance < rhs.m_instance;
20781 }
20782
Lenny Komowb0a17f22016-08-11 11:23:15 -060020783 void destroy( const AllocationCallbacks* pAllocator ) const
20784 {
20785 vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20786 }
20787
20788#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20789 void destroy( Optional<const AllocationCallbacks> allocator = nullptr ) const
20790 {
20791 vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20792 }
20793#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20794
20795 Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices ) const
20796 {
20797 return static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
20798 }
20799
20800#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20801 template <typename Allocator = std::allocator<PhysicalDevice>>
20802 typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices() const
20803 {
20804 std::vector<PhysicalDevice,Allocator> physicalDevices;
20805 uint32_t physicalDeviceCount;
20806 Result result;
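      // query the count first, then fetch; retry if the count changed between the two calls (Result::eIncomplete)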
20807 do
20808 {
20809 result = static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
20810 if ( ( result == Result::eSuccess ) && physicalDeviceCount )
20811 {
20812 physicalDevices.resize( physicalDeviceCount );
20813 result = static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
20814 }
20815 } while ( result == Result::eIncomplete );
20816 assert( physicalDeviceCount <= physicalDevices.size() );
20817 physicalDevices.resize( physicalDeviceCount );
20818 return createResultValue( result, physicalDevices, "vk::Instance::enumeratePhysicalDevices" );
20819 }
20820#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
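    // Usage sketch (illustrative): the enhanced-mode overload retries on
    // Result::eIncomplete internally, so a single call yields the complete list; an empty
    // vector simply means no Vulkan-capable device was found.
    //
    //   std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices();
    //   if ( !gpus.empty() )
    //   {
    //     vk::PhysicalDeviceProperties properties = gpus[0].getProperties();
    //   }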
20821
20822 PFN_vkVoidFunction getProcAddr( const char* pName ) const
20823 {
20824 return vkGetInstanceProcAddr( m_instance, pName );
20825 }
20826
20827#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20828 PFN_vkVoidFunction getProcAddr( const std::string & name ) const
20829 {
20830 return vkGetInstanceProcAddr( m_instance, name.c_str() );
20831 }
20832#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
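    // Usage sketch (illustrative): extension entry points that the loader does not export
    // statically can be fetched through getProcAddr and cast to the matching PFN_ type.
    //
    //   PFN_vkCreateDebugReportCallbackEXT pfn = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(
    //     instance.getProcAddr( "vkCreateDebugReportCallbackEXT" ) );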
20833
20834#ifdef VK_USE_PLATFORM_ANDROID_KHR
20835 Result createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
20836 {
20837 return static_cast<Result>( vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
20838 }
20839#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
20840
20841#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20842#ifdef VK_USE_PLATFORM_ANDROID_KHR
20843 ResultValueType<SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20844 {
20845 SurfaceKHR surface;
20846 Result result = static_cast<Result>( vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
20847 return createResultValue( result, surface, "vk::Instance::createAndroidSurfaceKHR" );
20848 }
20849#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
20850#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20851
20852 Result createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
20853 {
20854 return static_cast<Result>( vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
20855 }
20856
20857#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20858 ResultValueType<SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20859 {
20860 SurfaceKHR surface;
20861 Result result = static_cast<Result>( vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
20862 return createResultValue( result, surface, "vk::Instance::createDisplayPlaneSurfaceKHR" );
20863 }
20864#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20865
20866#ifdef VK_USE_PLATFORM_MIR_KHR
20867 Result createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
20868 {
20869 return static_cast<Result>( vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
20870 }
20871#endif /*VK_USE_PLATFORM_MIR_KHR*/
20872
20873#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20874#ifdef VK_USE_PLATFORM_MIR_KHR
20875 ResultValueType<SurfaceKHR>::type createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20876 {
20877 SurfaceKHR surface;
20878 Result result = static_cast<Result>( vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
20879 return createResultValue( result, surface, "vk::Instance::createMirSurfaceKHR" );
20880 }
20881#endif /*VK_USE_PLATFORM_MIR_KHR*/
20882#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20883
20884 void destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator ) const
20885 {
20886 vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20887 }
20888
20889#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20890 void destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr ) const
20891 {
20892 vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20893 }
20894#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20895
Mark Youngb5f087a2017-01-19 21:10:49 -070020896#ifdef VK_USE_PLATFORM_VI_NN
20897 Result createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
20898 {
20899 return static_cast<Result>( vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
20900 }
20901#endif /*VK_USE_PLATFORM_VI_NN*/
20902
20903#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20904#ifdef VK_USE_PLATFORM_VI_NN
20905 ResultValueType<SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20906 {
20907 SurfaceKHR surface;
20908 Result result = static_cast<Result>( vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
20909 return createResultValue( result, surface, "vk::Instance::createViSurfaceNN" );
20910 }
20911#endif /*VK_USE_PLATFORM_VI_NN*/
20912#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20913
Lenny Komowb0a17f22016-08-11 11:23:15 -060020914#ifdef VK_USE_PLATFORM_WAYLAND_KHR
20915 Result createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
20916 {
20917 return static_cast<Result>( vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
20918 }
20919#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
20920
20921#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20922#ifdef VK_USE_PLATFORM_WAYLAND_KHR
20923 ResultValueType<SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20924 {
20925 SurfaceKHR surface;
20926 Result result = static_cast<Result>( vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
20927 return createResultValue( result, surface, "vk::Instance::createWaylandSurfaceKHR" );
20928 }
20929#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
20930#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20931
20932#ifdef VK_USE_PLATFORM_WIN32_KHR
20933 Result createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
20934 {
20935 return static_cast<Result>( vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
20936 }
20937#endif /*VK_USE_PLATFORM_WIN32_KHR*/
20938
20939#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20940#ifdef VK_USE_PLATFORM_WIN32_KHR
20941 ResultValueType<SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20942 {
20943 SurfaceKHR surface;
20944 Result result = static_cast<Result>( vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
20945 return createResultValue( result, surface, "vk::Instance::createWin32SurfaceKHR" );
20946 }
20947#endif /*VK_USE_PLATFORM_WIN32_KHR*/
20948#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
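    // Usage sketch (Win32 only, illustrative; hinstance and hwnd are assumed to come from
    // the application's own window setup, and the default exception-based error handling
    // is assumed):
    //
    //   vk::Win32SurfaceCreateInfoKHR surfaceCreateInfo;
    //   surfaceCreateInfo.hinstance = hinstance;
    //   surfaceCreateInfo.hwnd      = hwnd;
    //   vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceCreateInfo );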
20949
20950#ifdef VK_USE_PLATFORM_XLIB_KHR
20951 Result createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
20952 {
20953 return static_cast<Result>( vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
20954 }
20955#endif /*VK_USE_PLATFORM_XLIB_KHR*/
20956
20957#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20958#ifdef VK_USE_PLATFORM_XLIB_KHR
20959 ResultValueType<SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20960 {
20961 SurfaceKHR surface;
20962 Result result = static_cast<Result>( vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
20963 return createResultValue( result, surface, "vk::Instance::createXlibSurfaceKHR" );
20964 }
20965#endif /*VK_USE_PLATFORM_XLIB_KHR*/
20966#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20967
20968#ifdef VK_USE_PLATFORM_XCB_KHR
20969 Result createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
20970 {
20971 return static_cast<Result>( vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
20972 }
20973#endif /*VK_USE_PLATFORM_XCB_KHR*/
20974
20975#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20976#ifdef VK_USE_PLATFORM_XCB_KHR
20977 ResultValueType<SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20978 {
20979 SurfaceKHR surface;
20980 Result result = static_cast<Result>( vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
20981 return createResultValue( result, surface, "vk::Instance::createXcbSurfaceKHR" );
20982 }
20983#endif /*VK_USE_PLATFORM_XCB_KHR*/
20984#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20985
20986 Result createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback ) const
20987 {
20988 return static_cast<Result>( vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
20989 }
20990
20991#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20992 ResultValueType<DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const
20993 {
20994 DebugReportCallbackEXT callback;
20995 Result result = static_cast<Result>( vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
20996 return createResultValue( result, callback, "vk::Instance::createDebugReportCallbackEXT" );
20997 }
20998#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20999
21000 void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator ) const
21001 {
21002 vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
21003 }
21004
21005#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21006 void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr ) const
21007 {
21008 vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
21009 }
21010#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
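    // Usage sketch (illustrative; requires the VK_EXT_debug_report extension to be enabled
    // on the instance, and the callback and flag selection below are placeholders):
    //
    //   VKAPI_ATTR VkBool32 VKAPI_CALL myCallback( VkDebugReportFlagsEXT, VkDebugReportObjectTypeEXT,
    //                                              uint64_t, size_t, int32_t,
    //                                              const char* pLayerPrefix, const char* pMessage, void* )
    //   {
    //     // forward pLayerPrefix / pMessage to the application's logging
    //     return VK_FALSE;                                  // do not abort the offending call
    //   }
    //
    //   vk::DebugReportCallbackCreateInfoEXT callbackCreateInfo;
    //   callbackCreateInfo.flags       = vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning;
    //   callbackCreateInfo.pfnCallback = myCallback;
    //   vk::DebugReportCallbackEXT callback = instance.createDebugReportCallbackEXT( callbackCreateInfo );
    //   ...
    //   instance.destroyDebugReportCallbackEXT( callback );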
21011
21012 void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const
21013 {
21014 vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
21015 }
21016
21017#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21018 void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const
21019 {
Lenny Komowb0a17f22016-08-11 11:23:15 -060021028      vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
21029 }
21030#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21031
21032#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
21033 explicit
21034#endif
21035 operator VkInstance() const
21036 {
21037 return m_instance;
21038 }
21039
21040 explicit operator bool() const
21041 {
21042 return m_instance != VK_NULL_HANDLE;
21043 }
21044
21045 bool operator!() const
21046 {
21047 return m_instance == VK_NULL_HANDLE;
21048 }
21049
21050 private:
21051 VkInstance m_instance;
21052 };
21053 static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
21054
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021055 struct CmdProcessCommandsInfoNVX
Lenny Komow5b8df842016-09-29 14:16:59 -060021056 {
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021057 CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t indirectCommandsTokenCount_ = 0, const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, uint32_t maxSequencesCount_ = 0, CommandBuffer targetCommandBuffer_ = CommandBuffer(), Buffer sequencesCountBuffer_ = Buffer(), DeviceSize sequencesCountOffset_ = 0, Buffer sequencesIndexBuffer_ = Buffer(), DeviceSize sequencesIndexOffset_ = 0 )
21058 : sType( StructureType::eCmdProcessCommandsInfoNVX )
Lenny Komow5b8df842016-09-29 14:16:59 -060021059 , pNext( nullptr )
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021060 , objectTable( objectTable_ )
21061 , indirectCommandsLayout( indirectCommandsLayout_ )
21062 , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
21063 , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
21064 , maxSequencesCount( maxSequencesCount_ )
21065 , targetCommandBuffer( targetCommandBuffer_ )
21066 , sequencesCountBuffer( sequencesCountBuffer_ )
21067 , sequencesCountOffset( sequencesCountOffset_ )
21068 , sequencesIndexBuffer( sequencesIndexBuffer_ )
21069 , sequencesIndexOffset( sequencesIndexOffset_ )
Lenny Komow5b8df842016-09-29 14:16:59 -060021070 {
21071 }
21072
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021073 CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs )
Lenny Komow5b8df842016-09-29 14:16:59 -060021074 {
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021075 memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
Lenny Komow5b8df842016-09-29 14:16:59 -060021076 }
21077
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021078 CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs )
Lenny Komow5b8df842016-09-29 14:16:59 -060021079 {
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021080 memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
Lenny Komow5b8df842016-09-29 14:16:59 -060021081 return *this;
21082 }
21083
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021084 CmdProcessCommandsInfoNVX& setSType( StructureType sType_ )
Lenny Komow5b8df842016-09-29 14:16:59 -060021085 {
21086 sType = sType_;
21087 return *this;
21088 }
21089
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021090 CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ )
Lenny Komow5b8df842016-09-29 14:16:59 -060021091 {
21092 pNext = pNext_;
21093 return *this;
21094 }
21095
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021096 CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
Lenny Komow5b8df842016-09-29 14:16:59 -060021097 {
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021098 objectTable = objectTable_;
Lenny Komow5b8df842016-09-29 14:16:59 -060021099 return *this;
21100 }
21101
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021102 CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
Lenny Komow5b8df842016-09-29 14:16:59 -060021103 {
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021104 indirectCommandsLayout = indirectCommandsLayout_;
Lenny Komow5b8df842016-09-29 14:16:59 -060021105 return *this;
21106 }
21107
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021108 CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ )
Lenny Komow5b8df842016-09-29 14:16:59 -060021109 {
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021110 indirectCommandsTokenCount = indirectCommandsTokenCount_;
21111 return *this;
Lenny Komow5b8df842016-09-29 14:16:59 -060021112 }
21113
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021114 CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ )
21115 {
21116 pIndirectCommandsTokens = pIndirectCommandsTokens_;
21117 return *this;
21118 }
21119
21120 CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
21121 {
21122 maxSequencesCount = maxSequencesCount_;
21123 return *this;
21124 }
21125
21126 CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ )
21127 {
21128 targetCommandBuffer = targetCommandBuffer_;
21129 return *this;
21130 }
21131
21132 CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ )
21133 {
21134 sequencesCountBuffer = sequencesCountBuffer_;
21135 return *this;
21136 }
21137
21138 CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ )
21139 {
21140 sequencesCountOffset = sequencesCountOffset_;
21141 return *this;
21142 }
21143
21144 CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ )
21145 {
21146 sequencesIndexBuffer = sequencesIndexBuffer_;
21147 return *this;
21148 }
21149
21150 CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ )
21151 {
21152 sequencesIndexOffset = sequencesIndexOffset_;
21153 return *this;
21154 }
21155
21156 operator const VkCmdProcessCommandsInfoNVX&() const
21157 {
21158 return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>(this);
21159 }
21160
21161 bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const
Lenny Komow5b8df842016-09-29 14:16:59 -060021162 {
21163 return ( sType == rhs.sType )
21164 && ( pNext == rhs.pNext )
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021165 && ( objectTable == rhs.objectTable )
21166 && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
21167 && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
21168 && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
21169 && ( maxSequencesCount == rhs.maxSequencesCount )
21170 && ( targetCommandBuffer == rhs.targetCommandBuffer )
21171 && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
21172 && ( sequencesCountOffset == rhs.sequencesCountOffset )
21173 && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
21174 && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
Lenny Komow5b8df842016-09-29 14:16:59 -060021175 }
21176
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021177 bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
Lenny Komow5b8df842016-09-29 14:16:59 -060021178 {
21179 return !operator==( rhs );
21180 }
21181
21182 private:
21183 StructureType sType;
21184
21185 public:
21186 const void* pNext;
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021187 ObjectTableNVX objectTable;
21188 IndirectCommandsLayoutNVX indirectCommandsLayout;
21189 uint32_t indirectCommandsTokenCount;
21190 const IndirectCommandsTokenNVX* pIndirectCommandsTokens;
21191 uint32_t maxSequencesCount;
21192 CommandBuffer targetCommandBuffer;
21193 Buffer sequencesCountBuffer;
21194 DeviceSize sequencesCountOffset;
21195 Buffer sequencesIndexBuffer;
21196 DeviceSize sequencesIndexOffset;
Lenny Komow5b8df842016-09-29 14:16:59 -060021197 };
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021198 static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
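  // Usage sketch (illustrative; the object table, layout and token vector are assumed to
  // have been created through the VK_NVX_device_generated_commands extension): every
  // setter returns *this, so the struct can be filled with a fluent chain and then passed
  // to CommandBuffer::processCommandsNVX.
  //
  //   vk::CmdProcessCommandsInfoNVX processInfo = vk::CmdProcessCommandsInfoNVX()
  //     .setObjectTable( objectTable )
  //     .setIndirectCommandsLayout( indirectCommandsLayout )
  //     .setIndirectCommandsTokenCount( static_cast<uint32_t>( tokens.size() ) )
  //     .setPIndirectCommandsTokens( tokens.data() )
  //     .setMaxSequencesCount( maxSequences );
  //   commandBuffer.processCommandsNVX( processInfo );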
Lenny Komow5b8df842016-09-29 14:16:59 -060021199
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021200 VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance )
Lenny Komowb0a17f22016-08-11 11:23:15 -060021201 {
21202 return static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
21203 }
21204
21205#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021206 VULKAN_HPP_INLINE ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr )
Lenny Komowb0a17f22016-08-11 11:23:15 -060021207 {
21208 Instance instance;
21209 Result result = static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkInstance*>( &instance ) ) );
21210 return createResultValue( result, instance, "vk::createInstance" );
21211 }
21212#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
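  // Usage sketch (illustrative; application name, versions and the extension list are
  // placeholders, and the default exception-based error handling is assumed):
  //
  //   vk::ApplicationInfo appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_0 );
  //   const char* extensions[] = { VK_KHR_SURFACE_EXTENSION_NAME };
  //   vk::InstanceCreateInfo instanceCreateInfo;
  //   instanceCreateInfo.pApplicationInfo        = &appInfo;
  //   instanceCreateInfo.enabledExtensionCount   = 1;
  //   instanceCreateInfo.ppEnabledExtensionNames = extensions;
  //   vk::Instance instance = vk::createInstance( instanceCreateInfo );
  //   ...
  //   instance.destroy();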
21213
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021214 VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021215 {
21216 return "(void)";
21217 }
21218
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021219 VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021220 {
21221 return "{}";
21222 }
21223
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021224 VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021225 {
21226 return "(void)";
21227 }
21228
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021229 VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021230 {
21231 return "{}";
21232 }
21233
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021234 VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021235 {
21236 return "(void)";
21237 }
21238
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021239 VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021240 {
21241 return "{}";
21242 }
21243
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021244 VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021245 {
21246 return "(void)";
21247 }
21248
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021249 VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021250 {
21251 return "{}";
21252 }
21253
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021254 VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021255 {
21256 return "(void)";
21257 }
21258
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021259 VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021260 {
21261 return "{}";
21262 }
21263
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021264 VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021265 {
21266 return "(void)";
21267 }
21268
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021269 VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021270 {
21271 return "{}";
21272 }
21273
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021274 VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021275 {
21276 return "(void)";
21277 }
21278
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021279 VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021280 {
21281 return "{}";
21282 }
21283
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021284 VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021285 {
21286 return "(void)";
21287 }
21288
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021289 VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021290 {
21291 return "{}";
21292 }
21293
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021294 VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021295 {
21296 return "(void)";
21297 }
21298
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021299 VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021300 {
21301 return "{}";
21302 }
21303
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021304 VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021305 {
21306 return "(void)";
21307 }
21308
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021309 VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021310 {
21311 return "{}";
21312 }
21313
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021314 VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021315 {
21316 return "(void)";
21317 }
21318
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021319 VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021320 {
21321 return "{}";
21322 }
21323
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021324 VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021325 {
21326 return "(void)";
21327 }
21328
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021329 VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021330 {
21331 return "{}";
21332 }
21333
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021334 VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021335 {
21336 return "(void)";
21337 }
21338
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021339 VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021340 {
21341 return "{}";
21342 }
21343
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021344 VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021345 {
21346 return "(void)";
21347 }
21348
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021349 VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021350 {
21351 return "{}";
21352 }
21353
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021354 VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021355 {
21356 return "(void)";
21357 }
21358
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021359 VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021360 {
21361 return "{}";
21362 }
21363
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021364 VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021365 {
21366 return "(void)";
21367 }
21368
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021369 VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021370 {
21371 return "{}";
21372 }
21373
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021374 VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021375 {
21376 return "(void)";
21377 }
21378
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021379 VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021380 {
21381 return "{}";
21382 }
21383
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021384 VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021385 {
21386 return "(void)";
21387 }
21388
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021389 VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021390 {
21391 return "{}";
21392 }
21393
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021394 VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021395 {
21396 return "(void)";
21397 }
21398
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021399 VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021400 {
21401 return "{}";
21402 }
21403
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021404 VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021405 {
21406 return "(void)";
21407 }
21408
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021409 VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021410 {
21411 return "{}";
21412 }
21413
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021414 VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021415 {
21416 return "(void)";
21417 }
21418
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021419 VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021420 {
21421 return "{}";
21422 }
21423
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021424 VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021425 {
21426 return "(void)";
21427 }
21428
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021429 VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021430 {
21431 return "{}";
21432 }
21433
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021434 VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021435 {
21436 return "(void)";
21437 }
21438
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021439 VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021440 {
21441 return "{}";
21442 }
21443
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021444 VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021445 {
21446 return "(void)";
21447 }
21448
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021449 VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021450 {
21451 return "{}";
21452 }
21453
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021454 VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021455 {
21456 return "(void)";
21457 }
21458
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021459 VULKAN_HPP_INLINE std::string to_string(EventCreateFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021460 {
21461 return "{}";
21462 }
21463
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021464 VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021465 {
21466 return "(void)";
21467 }
21468
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021469 VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021470 {
21471 return "{}";
21472 }
21473
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021474 VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021475 {
21476 return "(void)";
21477 }
21478
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021479 VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021480 {
21481 return "{}";
21482 }
21483
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021484 VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021485 {
21486 return "(void)";
21487 }
21488
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021489 VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021490 {
21491 return "{}";
21492 }
21493
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021494 VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021495 {
21496 return "(void)";
21497 }
21498
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021499 VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021500 {
21501 return "{}";
21502 }
21503
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021504 VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021505 {
21506 return "(void)";
21507 }
21508
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021509 VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021510 {
21511 return "{}";
21512 }
21513
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021514 VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021515 {
21516 return "(void)";
21517 }
21518
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021519 VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021520 {
21521 return "{}";
21522 }
21523
21524#ifdef VK_USE_PLATFORM_ANDROID_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021525 VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021526 {
21527 return "(void)";
21528 }
21529#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
21530
21531#ifdef VK_USE_PLATFORM_ANDROID_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021532 VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021533 {
21534 return "{}";
21535 }
21536#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
21537
21538#ifdef VK_USE_PLATFORM_MIR_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021539 VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021540 {
21541 return "(void)";
21542 }
21543#endif /*VK_USE_PLATFORM_MIR_KHR*/
21544
21545#ifdef VK_USE_PLATFORM_MIR_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021546 VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021547 {
21548 return "{}";
21549 }
21550#endif /*VK_USE_PLATFORM_MIR_KHR*/
21551
Mark Youngb5f087a2017-01-19 21:10:49 -070021552#ifdef VK_USE_PLATFORM_VI_NN
21553 VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagBitsNN)
21554 {
21555 return "(void)";
21556 }
21557#endif /*VK_USE_PLATFORM_VI_NN*/
21558
21559#ifdef VK_USE_PLATFORM_VI_NN
21560 VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagsNN)
21561 {
21562 return "{}";
21563 }
21564#endif /*VK_USE_PLATFORM_VI_NN*/
21565
Lenny Komowb0a17f22016-08-11 11:23:15 -060021566#ifdef VK_USE_PLATFORM_WAYLAND_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021567 VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021568 {
21569 return "(void)";
21570 }
21571#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
21572
21573#ifdef VK_USE_PLATFORM_WAYLAND_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021574 VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021575 {
21576 return "{}";
21577 }
21578#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
21579
21580#ifdef VK_USE_PLATFORM_WIN32_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021581 VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021582 {
21583 return "(void)";
21584 }
21585#endif /*VK_USE_PLATFORM_WIN32_KHR*/
21586
21587#ifdef VK_USE_PLATFORM_WIN32_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021588 VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021589 {
21590 return "{}";
21591 }
21592#endif /*VK_USE_PLATFORM_WIN32_KHR*/
21593
21594#ifdef VK_USE_PLATFORM_XLIB_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021595 VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021596 {
21597 return "(void)";
21598 }
21599#endif /*VK_USE_PLATFORM_XLIB_KHR*/
21600
21601#ifdef VK_USE_PLATFORM_XLIB_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021602 VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021603 {
21604 return "{}";
21605 }
21606#endif /*VK_USE_PLATFORM_XLIB_KHR*/
21607
21608#ifdef VK_USE_PLATFORM_XCB_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021609 VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021610 {
21611 return "(void)";
21612 }
21613#endif /*VK_USE_PLATFORM_XCB_KHR*/
21614
21615#ifdef VK_USE_PLATFORM_XCB_KHR
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021616 VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021617 {
21618 return "{}";
21619 }
21620#endif /*VK_USE_PLATFORM_XCB_KHR*/
21621
Mark Youngb5f087a2017-01-19 21:10:49 -070021622 VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagBitsKHR)
21623 {
21624 return "(void)";
21625 }
21626
21627 VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagsKHR)
21628 {
21629 return "{}";
21630 }
21631
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021632 VULKAN_HPP_INLINE std::string to_string(ImageLayout value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021633 {
21634 switch (value)
21635 {
21636 case ImageLayout::eUndefined: return "Undefined";
21637 case ImageLayout::eGeneral: return "General";
21638 case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
21639 case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
21640 case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
21641 case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
21642 case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
21643 case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
21644 case ImageLayout::ePreinitialized: return "Preinitialized";
21645 case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
21646 default: return "invalid";
21647 }
21648 }
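  // Usage sketch (illustrative): the to_string overloads are convenient for logging; pure
  // flag types with no bits defined stringify to "(void)" or "{}" as above.
  //
  //   std::string s = vk::to_string( vk::ImageLayout::eTransferDstOptimal );   // "TransferDstOptimal"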
21649
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021650 VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021651 {
21652 switch (value)
21653 {
21654 case AttachmentLoadOp::eLoad: return "Load";
21655 case AttachmentLoadOp::eClear: return "Clear";
21656 case AttachmentLoadOp::eDontCare: return "DontCare";
21657 default: return "invalid";
21658 }
21659 }
21660
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021661 VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021662 {
21663 switch (value)
21664 {
21665 case AttachmentStoreOp::eStore: return "Store";
21666 case AttachmentStoreOp::eDontCare: return "DontCare";
21667 default: return "invalid";
21668 }
21669 }
21670
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021671 VULKAN_HPP_INLINE std::string to_string(ImageType value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021672 {
21673 switch (value)
21674 {
21675 case ImageType::e1D: return "1D";
21676 case ImageType::e2D: return "2D";
21677 case ImageType::e3D: return "3D";
21678 default: return "invalid";
21679 }
21680 }
21681
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021682 VULKAN_HPP_INLINE std::string to_string(ImageTiling value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021683 {
21684 switch (value)
21685 {
21686 case ImageTiling::eOptimal: return "Optimal";
21687 case ImageTiling::eLinear: return "Linear";
21688 default: return "invalid";
21689 }
21690 }
21691
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021692 VULKAN_HPP_INLINE std::string to_string(ImageViewType value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021693 {
21694 switch (value)
21695 {
21696 case ImageViewType::e1D: return "1D";
21697 case ImageViewType::e2D: return "2D";
21698 case ImageViewType::e3D: return "3D";
21699 case ImageViewType::eCube: return "Cube";
21700 case ImageViewType::e1DArray: return "1DArray";
21701 case ImageViewType::e2DArray: return "2DArray";
21702 case ImageViewType::eCubeArray: return "CubeArray";
21703 default: return "invalid";
21704 }
21705 }
21706
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021707 VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021708 {
21709 switch (value)
21710 {
21711 case CommandBufferLevel::ePrimary: return "Primary";
21712 case CommandBufferLevel::eSecondary: return "Secondary";
21713 default: return "invalid";
21714 }
21715 }
21716
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021717 VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021718 {
21719 switch (value)
21720 {
21721 case ComponentSwizzle::eIdentity: return "Identity";
21722 case ComponentSwizzle::eZero: return "Zero";
21723 case ComponentSwizzle::eOne: return "One";
21724 case ComponentSwizzle::eR: return "R";
21725 case ComponentSwizzle::eG: return "G";
21726 case ComponentSwizzle::eB: return "B";
21727 case ComponentSwizzle::eA: return "A";
21728 default: return "invalid";
21729 }
21730 }
21731
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021732 VULKAN_HPP_INLINE std::string to_string(DescriptorType value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021733 {
21734 switch (value)
21735 {
21736 case DescriptorType::eSampler: return "Sampler";
21737 case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
21738 case DescriptorType::eSampledImage: return "SampledImage";
21739 case DescriptorType::eStorageImage: return "StorageImage";
21740 case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
21741 case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
21742 case DescriptorType::eUniformBuffer: return "UniformBuffer";
21743 case DescriptorType::eStorageBuffer: return "StorageBuffer";
21744 case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
21745 case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
21746 case DescriptorType::eInputAttachment: return "InputAttachment";
21747 default: return "invalid";
21748 }
21749 }
21750
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021751 VULKAN_HPP_INLINE std::string to_string(QueryType value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021752 {
21753 switch (value)
21754 {
21755 case QueryType::eOcclusion: return "Occlusion";
21756 case QueryType::ePipelineStatistics: return "PipelineStatistics";
21757 case QueryType::eTimestamp: return "Timestamp";
21758 default: return "invalid";
21759 }
21760 }
21761
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021762 VULKAN_HPP_INLINE std::string to_string(BorderColor value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021763 {
21764 switch (value)
21765 {
21766 case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
21767 case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
21768 case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
21769 case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
21770 case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
21771 case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
21772 default: return "invalid";
21773 }
21774 }
21775
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021776 VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021777 {
21778 switch (value)
21779 {
21780 case PipelineBindPoint::eGraphics: return "Graphics";
21781 case PipelineBindPoint::eCompute: return "Compute";
21782 default: return "invalid";
21783 }
21784 }
21785
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021786 VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021787 {
21788 switch (value)
21789 {
21790 case PipelineCacheHeaderVersion::eOne: return "One";
21791 default: return "invalid";
21792 }
21793 }
21794
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021795 VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021796 {
21797 switch (value)
21798 {
21799 case PrimitiveTopology::ePointList: return "PointList";
21800 case PrimitiveTopology::eLineList: return "LineList";
21801 case PrimitiveTopology::eLineStrip: return "LineStrip";
21802 case PrimitiveTopology::eTriangleList: return "TriangleList";
21803 case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
21804 case PrimitiveTopology::eTriangleFan: return "TriangleFan";
21805 case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
21806 case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
21807 case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
21808 case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
21809 case PrimitiveTopology::ePatchList: return "PatchList";
21810 default: return "invalid";
21811 }
21812 }
21813
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021814 VULKAN_HPP_INLINE std::string to_string(SharingMode value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021815 {
21816 switch (value)
21817 {
21818 case SharingMode::eExclusive: return "Exclusive";
21819 case SharingMode::eConcurrent: return "Concurrent";
21820 default: return "invalid";
21821 }
21822 }
21823
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021824 VULKAN_HPP_INLINE std::string to_string(IndexType value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021825 {
21826 switch (value)
21827 {
21828 case IndexType::eUint16: return "Uint16";
21829 case IndexType::eUint32: return "Uint32";
21830 default: return "invalid";
21831 }
21832 }
21833
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021834 VULKAN_HPP_INLINE std::string to_string(Filter value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021835 {
21836 switch (value)
21837 {
21838 case Filter::eNearest: return "Nearest";
21839 case Filter::eLinear: return "Linear";
21840 case Filter::eCubicIMG: return "CubicIMG";
21841 default: return "invalid";
21842 }
21843 }
21844
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021845 VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021846 {
21847 switch (value)
21848 {
21849 case SamplerMipmapMode::eNearest: return "Nearest";
21850 case SamplerMipmapMode::eLinear: return "Linear";
21851 default: return "invalid";
21852 }
21853 }
21854
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021855 VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021856 {
21857 switch (value)
21858 {
21859 case SamplerAddressMode::eRepeat: return "Repeat";
21860 case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
21861 case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
21862 case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
21863 case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
21864 default: return "invalid";
21865 }
21866 }
21867
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021868 VULKAN_HPP_INLINE std::string to_string(CompareOp value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021869 {
21870 switch (value)
21871 {
21872 case CompareOp::eNever: return "Never";
21873 case CompareOp::eLess: return "Less";
21874 case CompareOp::eEqual: return "Equal";
21875 case CompareOp::eLessOrEqual: return "LessOrEqual";
21876 case CompareOp::eGreater: return "Greater";
21877 case CompareOp::eNotEqual: return "NotEqual";
21878 case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
21879 case CompareOp::eAlways: return "Always";
21880 default: return "invalid";
21881 }
21882 }
21883
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021884 VULKAN_HPP_INLINE std::string to_string(PolygonMode value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021885 {
21886 switch (value)
21887 {
21888 case PolygonMode::eFill: return "Fill";
21889 case PolygonMode::eLine: return "Line";
21890 case PolygonMode::ePoint: return "Point";
21891 default: return "invalid";
21892 }
21893 }
21894
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021895 VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021896 {
21897 switch (value)
21898 {
21899 case CullModeFlagBits::eNone: return "None";
21900 case CullModeFlagBits::eFront: return "Front";
21901 case CullModeFlagBits::eBack: return "Back";
21902 case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
21903 default: return "invalid";
21904 }
21905 }
21906
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021907 VULKAN_HPP_INLINE std::string to_string(CullModeFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021908 {
21909 if (!value) return "{}";
21910 std::string result;
21911 if (value & CullModeFlagBits::eNone) result += "None | ";
21912 if (value & CullModeFlagBits::eFront) result += "Front | ";
21913 if (value & CullModeFlagBits::eBack) result += "Back | ";
21914 if (value & CullModeFlagBits::eFrontAndBack) result += "FrontAndBack | ";
21915 return "{" + result.substr(0, result.size() - 3) + "}";
21916 }
21917
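  // For bitmask types, the to_string overloads append "<Bit> | " for every set bit, trim the
  // trailing " | " (three characters) with substr, and wrap the result in braces; an empty mask
  // short-circuits to "{}". Since CullModeFlagBits::eNone is zero, the (value & eNone) test
  // above can never fire, so an empty mask is only ever reported through that early return.
  // Usage sketch (illustrative, not part of the generated header):
  //   vk::CullModeFlags cull = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;
  //   vk::to_string(cull)  // yields "{Front | Back | FrontAndBack}", because eFrontAndBack
  //                        // is the combination of the two single-face bits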
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021918 VULKAN_HPP_INLINE std::string to_string(FrontFace value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021919 {
21920 switch (value)
21921 {
21922 case FrontFace::eCounterClockwise: return "CounterClockwise";
21923 case FrontFace::eClockwise: return "Clockwise";
21924 default: return "invalid";
21925 }
21926 }
21927
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021928 VULKAN_HPP_INLINE std::string to_string(BlendFactor value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021929 {
21930 switch (value)
21931 {
21932 case BlendFactor::eZero: return "Zero";
21933 case BlendFactor::eOne: return "One";
21934 case BlendFactor::eSrcColor: return "SrcColor";
21935 case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
21936 case BlendFactor::eDstColor: return "DstColor";
21937 case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
21938 case BlendFactor::eSrcAlpha: return "SrcAlpha";
21939 case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
21940 case BlendFactor::eDstAlpha: return "DstAlpha";
21941 case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
21942 case BlendFactor::eConstantColor: return "ConstantColor";
21943 case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
21944 case BlendFactor::eConstantAlpha: return "ConstantAlpha";
21945 case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
21946 case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
21947 case BlendFactor::eSrc1Color: return "Src1Color";
21948 case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
21949 case BlendFactor::eSrc1Alpha: return "Src1Alpha";
21950 case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
21951 default: return "invalid";
21952 }
21953 }
21954
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021955 VULKAN_HPP_INLINE std::string to_string(BlendOp value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021956 {
21957 switch (value)
21958 {
21959 case BlendOp::eAdd: return "Add";
21960 case BlendOp::eSubtract: return "Subtract";
21961 case BlendOp::eReverseSubtract: return "ReverseSubtract";
21962 case BlendOp::eMin: return "Min";
21963 case BlendOp::eMax: return "Max";
21964 default: return "invalid";
21965 }
21966 }
21967
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021968 VULKAN_HPP_INLINE std::string to_string(StencilOp value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021969 {
21970 switch (value)
21971 {
21972 case StencilOp::eKeep: return "Keep";
21973 case StencilOp::eZero: return "Zero";
21974 case StencilOp::eReplace: return "Replace";
21975 case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
21976 case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
21977 case StencilOp::eInvert: return "Invert";
21978 case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
21979 case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
21980 default: return "invalid";
21981 }
21982 }
21983
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070021984 VULKAN_HPP_INLINE std::string to_string(LogicOp value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060021985 {
21986 switch (value)
21987 {
21988 case LogicOp::eClear: return "Clear";
21989 case LogicOp::eAnd: return "And";
21990 case LogicOp::eAndReverse: return "AndReverse";
21991 case LogicOp::eCopy: return "Copy";
21992 case LogicOp::eAndInverted: return "AndInverted";
21993 case LogicOp::eNoOp: return "NoOp";
21994 case LogicOp::eXor: return "Xor";
21995 case LogicOp::eOr: return "Or";
21996 case LogicOp::eNor: return "Nor";
21997 case LogicOp::eEquivalent: return "Equivalent";
21998 case LogicOp::eInvert: return "Invert";
21999 case LogicOp::eOrReverse: return "OrReverse";
22000 case LogicOp::eCopyInverted: return "CopyInverted";
22001 case LogicOp::eOrInverted: return "OrInverted";
22002 case LogicOp::eNand: return "Nand";
22003 case LogicOp::eSet: return "Set";
22004 default: return "invalid";
22005 }
22006 }
22007
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022008 VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022009 {
22010 switch (value)
22011 {
22012 case InternalAllocationType::eExecutable: return "Executable";
22013 default: return "invalid";
22014 }
22015 }
22016
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022017 VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022018 {
22019 switch (value)
22020 {
22021 case SystemAllocationScope::eCommand: return "Command";
22022 case SystemAllocationScope::eObject: return "Object";
22023 case SystemAllocationScope::eCache: return "Cache";
22024 case SystemAllocationScope::eDevice: return "Device";
22025 case SystemAllocationScope::eInstance: return "Instance";
22026 default: return "invalid";
22027 }
22028 }
22029
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022030 VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022031 {
22032 switch (value)
22033 {
22034 case PhysicalDeviceType::eOther: return "Other";
22035 case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
22036 case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
22037 case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
22038 case PhysicalDeviceType::eCpu: return "Cpu";
22039 default: return "invalid";
22040 }
22041 }
22042
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022043 VULKAN_HPP_INLINE std::string to_string(VertexInputRate value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022044 {
22045 switch (value)
22046 {
22047 case VertexInputRate::eVertex: return "Vertex";
22048 case VertexInputRate::eInstance: return "Instance";
22049 default: return "invalid";
22050 }
22051 }
22052
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022053 VULKAN_HPP_INLINE std::string to_string(Format value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022054 {
22055 switch (value)
22056 {
22057 case Format::eUndefined: return "Undefined";
22058 case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
22059 case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
22060 case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
22061 case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
22062 case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
22063 case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
22064 case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
22065 case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
22066 case Format::eR8Unorm: return "R8Unorm";
22067 case Format::eR8Snorm: return "R8Snorm";
22068 case Format::eR8Uscaled: return "R8Uscaled";
22069 case Format::eR8Sscaled: return "R8Sscaled";
22070 case Format::eR8Uint: return "R8Uint";
22071 case Format::eR8Sint: return "R8Sint";
22072 case Format::eR8Srgb: return "R8Srgb";
22073 case Format::eR8G8Unorm: return "R8G8Unorm";
22074 case Format::eR8G8Snorm: return "R8G8Snorm";
22075 case Format::eR8G8Uscaled: return "R8G8Uscaled";
22076 case Format::eR8G8Sscaled: return "R8G8Sscaled";
22077 case Format::eR8G8Uint: return "R8G8Uint";
22078 case Format::eR8G8Sint: return "R8G8Sint";
22079 case Format::eR8G8Srgb: return "R8G8Srgb";
22080 case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
22081 case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
22082 case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
22083 case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
22084 case Format::eR8G8B8Uint: return "R8G8B8Uint";
22085 case Format::eR8G8B8Sint: return "R8G8B8Sint";
22086 case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
22087 case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
22088 case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
22089 case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
22090 case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
22091 case Format::eB8G8R8Uint: return "B8G8R8Uint";
22092 case Format::eB8G8R8Sint: return "B8G8R8Sint";
22093 case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
22094 case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
22095 case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
22096 case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
22097 case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
22098 case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
22099 case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
22100 case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
22101 case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
22102 case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
22103 case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
22104 case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
22105 case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
22106 case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
22107 case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
22108 case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
22109 case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
22110 case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
22111 case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
22112 case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
22113 case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
22114 case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
22115 case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
22116 case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
22117 case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
22118 case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
22119 case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
22120 case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
22121 case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
22122 case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
22123 case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
22124 case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
22125 case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
22126 case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
22127 case Format::eR16Unorm: return "R16Unorm";
22128 case Format::eR16Snorm: return "R16Snorm";
22129 case Format::eR16Uscaled: return "R16Uscaled";
22130 case Format::eR16Sscaled: return "R16Sscaled";
22131 case Format::eR16Uint: return "R16Uint";
22132 case Format::eR16Sint: return "R16Sint";
22133 case Format::eR16Sfloat: return "R16Sfloat";
22134 case Format::eR16G16Unorm: return "R16G16Unorm";
22135 case Format::eR16G16Snorm: return "R16G16Snorm";
22136 case Format::eR16G16Uscaled: return "R16G16Uscaled";
22137 case Format::eR16G16Sscaled: return "R16G16Sscaled";
22138 case Format::eR16G16Uint: return "R16G16Uint";
22139 case Format::eR16G16Sint: return "R16G16Sint";
22140 case Format::eR16G16Sfloat: return "R16G16Sfloat";
22141 case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
22142 case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
22143 case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
22144 case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
22145 case Format::eR16G16B16Uint: return "R16G16B16Uint";
22146 case Format::eR16G16B16Sint: return "R16G16B16Sint";
22147 case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
22148 case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
22149 case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
22150 case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
22151 case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
22152 case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
22153 case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
22154 case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
22155 case Format::eR32Uint: return "R32Uint";
22156 case Format::eR32Sint: return "R32Sint";
22157 case Format::eR32Sfloat: return "R32Sfloat";
22158 case Format::eR32G32Uint: return "R32G32Uint";
22159 case Format::eR32G32Sint: return "R32G32Sint";
22160 case Format::eR32G32Sfloat: return "R32G32Sfloat";
22161 case Format::eR32G32B32Uint: return "R32G32B32Uint";
22162 case Format::eR32G32B32Sint: return "R32G32B32Sint";
22163 case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
22164 case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
22165 case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
22166 case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
22167 case Format::eR64Uint: return "R64Uint";
22168 case Format::eR64Sint: return "R64Sint";
22169 case Format::eR64Sfloat: return "R64Sfloat";
22170 case Format::eR64G64Uint: return "R64G64Uint";
22171 case Format::eR64G64Sint: return "R64G64Sint";
22172 case Format::eR64G64Sfloat: return "R64G64Sfloat";
22173 case Format::eR64G64B64Uint: return "R64G64B64Uint";
22174 case Format::eR64G64B64Sint: return "R64G64B64Sint";
22175 case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
22176 case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
22177 case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
22178 case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
22179 case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
22180 case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
22181 case Format::eD16Unorm: return "D16Unorm";
22182 case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
22183 case Format::eD32Sfloat: return "D32Sfloat";
22184 case Format::eS8Uint: return "S8Uint";
22185 case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
22186 case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
22187 case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
22188 case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
22189 case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
22190 case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
22191 case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
22192 case Format::eBc2UnormBlock: return "Bc2UnormBlock";
22193 case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
22194 case Format::eBc3UnormBlock: return "Bc3UnormBlock";
22195 case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
22196 case Format::eBc4UnormBlock: return "Bc4UnormBlock";
22197 case Format::eBc4SnormBlock: return "Bc4SnormBlock";
22198 case Format::eBc5UnormBlock: return "Bc5UnormBlock";
22199 case Format::eBc5SnormBlock: return "Bc5SnormBlock";
22200 case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
22201 case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
22202 case Format::eBc7UnormBlock: return "Bc7UnormBlock";
22203 case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
22204 case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
22205 case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
22206 case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
22207 case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
22208 case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
22209 case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
22210 case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
22211 case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
22212 case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
22213 case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
22214 case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
22215 case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
22216 case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
22217 case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
22218 case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
22219 case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
22220 case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
22221 case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
22222 case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
22223 case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
22224 case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
22225 case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
22226 case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
22227 case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
22228 case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
22229 case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
22230 case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
22231 case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
22232 case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
22233 case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
22234 case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
22235 case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
22236 case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
22237 case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
22238 case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
22239 case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
22240 case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
22241 case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
Lenny Komow339ffcd2016-08-26 14:10:08 -060022242 case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
22243 case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
22244 case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
22245 case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
22246 case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
22247 case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
22248 case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
22249 case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
Lenny Komowb0a17f22016-08-11 11:23:15 -060022250 default: return "invalid";
22251 }
22252 }
22253
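  // to_string(Format) is convenient when logging image or swapchain configuration. Usage sketch
  // (illustrative, not part of the generated header; assumes <iostream> and a hypothetical
  // chosenFormat variable):
  //   vk::Format chosenFormat = vk::Format::eB8G8R8A8Unorm;
  //   std::cout << "swapchain format: " << vk::to_string(chosenFormat) << "\n";
  //   // prints "swapchain format: B8G8R8A8Unorm"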
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022254 VULKAN_HPP_INLINE std::string to_string(StructureType value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022255 {
22256 switch (value)
22257 {
22258 case StructureType::eApplicationInfo: return "ApplicationInfo";
22259 case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
22260 case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
22261 case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
22262 case StructureType::eSubmitInfo: return "SubmitInfo";
22263 case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
22264 case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
22265 case StructureType::eBindSparseInfo: return "BindSparseInfo";
22266 case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
22267 case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
22268 case StructureType::eEventCreateInfo: return "EventCreateInfo";
22269 case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
22270 case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
22271 case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
22272 case StructureType::eImageCreateInfo: return "ImageCreateInfo";
22273 case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
22274 case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
22275 case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
22276 case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
22277 case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
22278 case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
22279 case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
22280 case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
22281 case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
22282 case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
22283 case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
22284 case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
22285 case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
22286 case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
22287 case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
22288 case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
22289 case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
22290 case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
22291 case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
22292 case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
22293 case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
22294 case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
22295 case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
22296 case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
22297 case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
22298 case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
22299 case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
22300 case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
22301 case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
22302 case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
22303 case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
22304 case StructureType::eMemoryBarrier: return "MemoryBarrier";
22305 case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
22306 case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
22307 case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
22308 case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
22309 case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
22310 case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
22311 case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
22312 case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
22313 case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
22314 case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
22315 case StructureType::eMirSurfaceCreateInfoKHR: return "MirSurfaceCreateInfoKHR";
22316 case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
22317 case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
22318 case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
22319 case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD";
22320 case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
22321 case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
22322 case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
22323 case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
22324 case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
22325 case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
Lenny Komow96962992016-08-31 15:03:49 -060022326 case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
22327 case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
22328 case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
22329 case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
22330 case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
Mark Youngb5f087a2017-01-19 21:10:49 -070022331 case StructureType::ePhysicalDeviceFeatures2KHR: return "PhysicalDeviceFeatures2KHR";
22332 case StructureType::ePhysicalDeviceProperties2KHR: return "PhysicalDeviceProperties2KHR";
22333 case StructureType::eFormatProperties2KHR: return "FormatProperties2KHR";
22334 case StructureType::eImageFormatProperties2KHR: return "ImageFormatProperties2KHR";
22335 case StructureType::ePhysicalDeviceImageFormatInfo2KHR: return "PhysicalDeviceImageFormatInfo2KHR";
22336 case StructureType::eQueueFamilyProperties2KHR: return "QueueFamilyProperties2KHR";
22337 case StructureType::ePhysicalDeviceMemoryProperties2KHR: return "PhysicalDeviceMemoryProperties2KHR";
22338 case StructureType::eSparseImageFormatProperties2KHR: return "SparseImageFormatProperties2KHR";
22339 case StructureType::ePhysicalDeviceSparseImageFormatInfo2KHR: return "PhysicalDeviceSparseImageFormatInfo2KHR";
Lenny Komow5b8df842016-09-29 14:16:59 -060022340 case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
Mark Youngb5f087a2017-01-19 21:10:49 -070022341 case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022342 case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX";
22343 case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX";
22344 case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX";
22345 case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX";
22346 case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX";
22347 case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX";
Mark Youngb5f087a2017-01-19 21:10:49 -070022348 case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
22349 case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
22350 case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
22351 case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
22352 case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
Lenny Komowb0a17f22016-08-11 11:23:15 -060022353 default: return "invalid";
22354 }
22355 }
22356
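  // Enumerants introduced by extensions keep their vendor suffix (KHR, EXT, NV, NVX, NN, AMD,
  // IMG) in the returned string, e.g. to_string(StructureType::eSwapchainCreateInfoKHR) yields
  // "SwapchainCreateInfoKHR", which keeps log output easy to match back to the corresponding
  // VK_STRUCTURE_TYPE_* names.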
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022357 VULKAN_HPP_INLINE std::string to_string(SubpassContents value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022358 {
22359 switch (value)
22360 {
22361 case SubpassContents::eInline: return "Inline";
22362 case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
22363 default: return "invalid";
22364 }
22365 }
22366
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022367 VULKAN_HPP_INLINE std::string to_string(DynamicState value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022368 {
22369 switch (value)
22370 {
22371 case DynamicState::eViewport: return "Viewport";
22372 case DynamicState::eScissor: return "Scissor";
22373 case DynamicState::eLineWidth: return "LineWidth";
22374 case DynamicState::eDepthBias: return "DepthBias";
22375 case DynamicState::eBlendConstants: return "BlendConstants";
22376 case DynamicState::eDepthBounds: return "DepthBounds";
22377 case DynamicState::eStencilCompareMask: return "StencilCompareMask";
22378 case DynamicState::eStencilWriteMask: return "StencilWriteMask";
22379 case DynamicState::eStencilReference: return "StencilReference";
22380 default: return "invalid";
22381 }
22382 }
22383
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022384 VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022385 {
22386 switch (value)
22387 {
22388 case QueueFlagBits::eGraphics: return "Graphics";
22389 case QueueFlagBits::eCompute: return "Compute";
22390 case QueueFlagBits::eTransfer: return "Transfer";
22391 case QueueFlagBits::eSparseBinding: return "SparseBinding";
22392 default: return "invalid";
22393 }
22394 }
22395
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022396 VULKAN_HPP_INLINE std::string to_string(QueueFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022397 {
22398 if (!value) return "{}";
22399 std::string result;
22400 if (value & QueueFlagBits::eGraphics) result += "Graphics | ";
22401 if (value & QueueFlagBits::eCompute) result += "Compute | ";
22402 if (value & QueueFlagBits::eTransfer) result += "Transfer | ";
22403 if (value & QueueFlagBits::eSparseBinding) result += "SparseBinding | ";
22404 return "{" + result.substr(0, result.size() - 3) + "}";
22405 }
22406
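  // Usage sketch (illustrative, not part of the generated header): printing each queue family's
  // capabilities while picking a queue. Assumes an existing vk::PhysicalDevice named
  // physicalDevice and the enhanced-mode getQueueFamilyProperties() wrapper.
  //   for (auto const& props : physicalDevice.getQueueFamilyProperties())
  //   {
  //     std::cout << vk::to_string(props.queueFlags) << " x" << props.queueCount << "\n";
  //     // e.g. "{Graphics | Compute | Transfer | SparseBinding} x16"
  //   }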
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022407 VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022408 {
22409 switch (value)
22410 {
22411 case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
22412 case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
22413 case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
22414 case MemoryPropertyFlagBits::eHostCached: return "HostCached";
22415 case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
22416 default: return "invalid";
22417 }
22418 }
22419
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022420 VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022421 {
22422 if (!value) return "{}";
22423 std::string result;
22424 if (value & MemoryPropertyFlagBits::eDeviceLocal) result += "DeviceLocal | ";
22425 if (value & MemoryPropertyFlagBits::eHostVisible) result += "HostVisible | ";
22426 if (value & MemoryPropertyFlagBits::eHostCoherent) result += "HostCoherent | ";
22427 if (value & MemoryPropertyFlagBits::eHostCached) result += "HostCached | ";
22428 if (value & MemoryPropertyFlagBits::eLazilyAllocated) result += "LazilyAllocated | ";
22429 return "{" + result.substr(0, result.size() - 3) + "}";
22430 }
22431
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022432 VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022433 {
22434 switch (value)
22435 {
22436 case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
22437 default: return "invalid";
22438 }
22439 }
22440
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022441 VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022442 {
22443 if (!value) return "{}";
22444 std::string result;
22445 if (value & MemoryHeapFlagBits::eDeviceLocal) result += "DeviceLocal | ";
22446 return "{" + result.substr(0, result.size() - 3) + "}";
22447 }
22448
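  // Usage sketch (illustrative, not part of the generated header): dumping the memory types a
  // device reports. Assumes an existing vk::PhysicalDevice named physicalDevice.
  //   vk::PhysicalDeviceMemoryProperties mem = physicalDevice.getMemoryProperties();
  //   for (uint32_t i = 0; i < mem.memoryTypeCount; ++i)
  //   {
  //     std::cout << "type " << i << ": " << vk::to_string(mem.memoryTypes[i].propertyFlags)
  //               << " (heap " << mem.memoryTypes[i].heapIndex << ")\n";
  //   }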
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022449 VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022450 {
22451 switch (value)
22452 {
22453 case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
22454 case AccessFlagBits::eIndexRead: return "IndexRead";
22455 case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
22456 case AccessFlagBits::eUniformRead: return "UniformRead";
22457 case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
22458 case AccessFlagBits::eShaderRead: return "ShaderRead";
22459 case AccessFlagBits::eShaderWrite: return "ShaderWrite";
22460 case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
22461 case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
22462 case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
22463 case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
22464 case AccessFlagBits::eTransferRead: return "TransferRead";
22465 case AccessFlagBits::eTransferWrite: return "TransferWrite";
22466 case AccessFlagBits::eHostRead: return "HostRead";
22467 case AccessFlagBits::eHostWrite: return "HostWrite";
22468 case AccessFlagBits::eMemoryRead: return "MemoryRead";
22469 case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022470 case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX";
22471 case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX";
Lenny Komowb0a17f22016-08-11 11:23:15 -060022472 default: return "invalid";
22473 }
22474 }
22475
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022476 VULKAN_HPP_INLINE std::string to_string(AccessFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022477 {
22478 if (!value) return "{}";
22479 std::string result;
22480 if (value & AccessFlagBits::eIndirectCommandRead) result += "IndirectCommandRead | ";
22481 if (value & AccessFlagBits::eIndexRead) result += "IndexRead | ";
22482 if (value & AccessFlagBits::eVertexAttributeRead) result += "VertexAttributeRead | ";
22483 if (value & AccessFlagBits::eUniformRead) result += "UniformRead | ";
22484 if (value & AccessFlagBits::eInputAttachmentRead) result += "InputAttachmentRead | ";
22485 if (value & AccessFlagBits::eShaderRead) result += "ShaderRead | ";
22486 if (value & AccessFlagBits::eShaderWrite) result += "ShaderWrite | ";
22487 if (value & AccessFlagBits::eColorAttachmentRead) result += "ColorAttachmentRead | ";
22488 if (value & AccessFlagBits::eColorAttachmentWrite) result += "ColorAttachmentWrite | ";
22489 if (value & AccessFlagBits::eDepthStencilAttachmentRead) result += "DepthStencilAttachmentRead | ";
22490 if (value & AccessFlagBits::eDepthStencilAttachmentWrite) result += "DepthStencilAttachmentWrite | ";
22491 if (value & AccessFlagBits::eTransferRead) result += "TransferRead | ";
22492 if (value & AccessFlagBits::eTransferWrite) result += "TransferWrite | ";
22493 if (value & AccessFlagBits::eHostRead) result += "HostRead | ";
22494 if (value & AccessFlagBits::eHostWrite) result += "HostWrite | ";
22495 if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | ";
22496 if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | ";
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022497 if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | ";
22498 if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | ";
Lenny Komowb0a17f22016-08-11 11:23:15 -060022499 return "{" + result.substr(0, result.size() - 3) + "}";
22500 }
22501
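  // Unrecognised bits are simply skipped by bitmask overloads such as the AccessFlags one above
  // (the if chain never appends for them), so only the FlagBits overloads can report "invalid";
  // that makes the flags form the safer choice when tracing barrier masks in debug output.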
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022502 VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022503 {
22504 switch (value)
22505 {
22506 case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
22507 case BufferUsageFlagBits::eTransferDst: return "TransferDst";
22508 case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
22509 case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
22510 case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
22511 case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
22512 case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
22513 case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
22514 case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
22515 default: return "invalid";
22516 }
22517 }
22518
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022519 VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022520 {
22521 if (!value) return "{}";
22522 std::string result;
22523 if (value & BufferUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
22524 if (value & BufferUsageFlagBits::eTransferDst) result += "TransferDst | ";
22525 if (value & BufferUsageFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
22526 if (value & BufferUsageFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
22527 if (value & BufferUsageFlagBits::eUniformBuffer) result += "UniformBuffer | ";
22528 if (value & BufferUsageFlagBits::eStorageBuffer) result += "StorageBuffer | ";
22529 if (value & BufferUsageFlagBits::eIndexBuffer) result += "IndexBuffer | ";
22530 if (value & BufferUsageFlagBits::eVertexBuffer) result += "VertexBuffer | ";
22531 if (value & BufferUsageFlagBits::eIndirectBuffer) result += "IndirectBuffer | ";
22532 return "{" + result.substr(0, result.size() - 3) + "}";
22533 }
22534
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022535 VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022536 {
22537 switch (value)
22538 {
22539 case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
22540 case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
22541 case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
22542 default: return "invalid";
22543 }
22544 }
22545
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022546 VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022547 {
22548 if (!value) return "{}";
22549 std::string result;
22550 if (value & BufferCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
22551 if (value & BufferCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
22552 if (value & BufferCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
22553 return "{" + result.substr(0, result.size() - 3) + "}";
22554 }
22555
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022556 VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022557 {
22558 switch (value)
22559 {
22560 case ShaderStageFlagBits::eVertex: return "Vertex";
22561 case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
22562 case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
22563 case ShaderStageFlagBits::eGeometry: return "Geometry";
22564 case ShaderStageFlagBits::eFragment: return "Fragment";
22565 case ShaderStageFlagBits::eCompute: return "Compute";
22566 case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
22567 case ShaderStageFlagBits::eAll: return "All";
22568 default: return "invalid";
22569 }
22570 }
22571
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022572 VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022573 {
22574 if (!value) return "{}";
22575 std::string result;
22576 if (value & ShaderStageFlagBits::eVertex) result += "Vertex | ";
22577 if (value & ShaderStageFlagBits::eTessellationControl) result += "TessellationControl | ";
22578 if (value & ShaderStageFlagBits::eTessellationEvaluation) result += "TessellationEvaluation | ";
22579 if (value & ShaderStageFlagBits::eGeometry) result += "Geometry | ";
22580 if (value & ShaderStageFlagBits::eFragment) result += "Fragment | ";
22581 if (value & ShaderStageFlagBits::eCompute) result += "Compute | ";
22582 if (value & ShaderStageFlagBits::eAllGraphics) result += "AllGraphics | ";
22583 if (value & ShaderStageFlagBits::eAll) result += "All | ";
22584 return "{" + result.substr(0, result.size() - 3) + "}";
22585 }
22586
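  // Note that ShaderStageFlagBits::eAllGraphics and ::eAll are composite masks rather than
  // single bits, so the flags overload above lists them alongside any individual stage they
  // cover; to_string(vk::ShaderStageFlags(vk::ShaderStageFlagBits::eVertex)) should come back
  // as "{Vertex | AllGraphics | All}", not just "{Vertex}".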
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022587 VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022588 {
22589 switch (value)
22590 {
22591 case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
22592 case ImageUsageFlagBits::eTransferDst: return "TransferDst";
22593 case ImageUsageFlagBits::eSampled: return "Sampled";
22594 case ImageUsageFlagBits::eStorage: return "Storage";
22595 case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
22596 case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
22597 case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
22598 case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
22599 default: return "invalid";
22600 }
22601 }
22602
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022603 VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022604 {
22605 if (!value) return "{}";
22606 std::string result;
22607 if (value & ImageUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
22608 if (value & ImageUsageFlagBits::eTransferDst) result += "TransferDst | ";
22609 if (value & ImageUsageFlagBits::eSampled) result += "Sampled | ";
22610 if (value & ImageUsageFlagBits::eStorage) result += "Storage | ";
22611 if (value & ImageUsageFlagBits::eColorAttachment) result += "ColorAttachment | ";
22612 if (value & ImageUsageFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
22613 if (value & ImageUsageFlagBits::eTransientAttachment) result += "TransientAttachment | ";
22614 if (value & ImageUsageFlagBits::eInputAttachment) result += "InputAttachment | ";
22615 return "{" + result.substr(0, result.size() - 3) + "}";
22616 }
22617
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022618 VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022619 {
22620 switch (value)
22621 {
22622 case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
22623 case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
22624 case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
22625 case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
22626 case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
Mark Youngb5f087a2017-01-19 21:10:49 -070022627 case ImageCreateFlagBits::e2DArrayCompatibleKHR: return "2DArrayCompatibleKHR";
Lenny Komowb0a17f22016-08-11 11:23:15 -060022628 default: return "invalid";
22629 }
22630 }
22631
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022632 VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022633 {
22634 if (!value) return "{}";
22635 std::string result;
22636 if (value & ImageCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
22637 if (value & ImageCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
22638 if (value & ImageCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
22639 if (value & ImageCreateFlagBits::eMutableFormat) result += "MutableFormat | ";
22640 if (value & ImageCreateFlagBits::eCubeCompatible) result += "CubeCompatible | ";
Mark Youngb5f087a2017-01-19 21:10:49 -070022641 if (value & ImageCreateFlagBits::e2DArrayCompatibleKHR) result += "2DArrayCompatibleKHR | ";
Lenny Komowb0a17f22016-08-11 11:23:15 -060022642 return "{" + result.substr(0, result.size() - 3) + "}";
22643 }
22644
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022645 VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022646 {
22647 switch (value)
22648 {
22649 case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
22650 case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
22651 case PipelineCreateFlagBits::eDerivative: return "Derivative";
22652 default: return "invalid";
22653 }
22654 }
22655
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022656 VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022657 {
22658 if (!value) return "{}";
22659 std::string result;
22660 if (value & PipelineCreateFlagBits::eDisableOptimization) result += "DisableOptimization | ";
22661 if (value & PipelineCreateFlagBits::eAllowDerivatives) result += "AllowDerivatives | ";
22662 if (value & PipelineCreateFlagBits::eDerivative) result += "Derivative | ";
22663 return "{" + result.substr(0, result.size() - 3) + "}";
22664 }
22665
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022666 VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022667 {
22668 switch (value)
22669 {
22670 case ColorComponentFlagBits::eR: return "R";
22671 case ColorComponentFlagBits::eG: return "G";
22672 case ColorComponentFlagBits::eB: return "B";
22673 case ColorComponentFlagBits::eA: return "A";
22674 default: return "invalid";
22675 }
22676 }
22677
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022678 VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022679 {
22680 if (!value) return "{}";
22681 std::string result;
22682 if (value & ColorComponentFlagBits::eR) result += "R | ";
22683 if (value & ColorComponentFlagBits::eG) result += "G | ";
22684 if (value & ColorComponentFlagBits::eB) result += "B | ";
22685 if (value & ColorComponentFlagBits::eA) result += "A | ";
22686 return "{" + result.substr(0, result.size() - 3) + "}";
22687 }
22688
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022689 VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022690 {
22691 switch (value)
22692 {
22693 case FenceCreateFlagBits::eSignaled: return "Signaled";
22694 default: return "invalid";
22695 }
22696 }
22697
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022698 VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022699 {
22700 if (!value) return "{}";
22701 std::string result;
22702 if (value & FenceCreateFlagBits::eSignaled) result += "Signaled | ";
22703 return "{" + result.substr(0, result.size() - 3) + "}";
22704 }
22705
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022706 VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022707 {
22708 switch (value)
22709 {
22710 case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
22711 case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
22712 case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
22713 case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
22714 case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
22715 case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
22716 case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
22717 case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
22718 case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
22719 case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
22720 case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
22721 case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
22722 case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
22723 case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
Mark Youngb5f087a2017-01-19 21:10:49 -070022724 case FormatFeatureFlagBits::eTransferSrcKHR: return "TransferSrcKHR";
22725 case FormatFeatureFlagBits::eTransferDstKHR: return "TransferDstKHR";
Lenny Komowb0a17f22016-08-11 11:23:15 -060022726 default: return "invalid";
22727 }
22728 }
22729
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022730 VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022731 {
22732 if (!value) return "{}";
22733 std::string result;
22734 if (value & FormatFeatureFlagBits::eSampledImage) result += "SampledImage | ";
22735 if (value & FormatFeatureFlagBits::eStorageImage) result += "StorageImage | ";
22736 if (value & FormatFeatureFlagBits::eStorageImageAtomic) result += "StorageImageAtomic | ";
22737 if (value & FormatFeatureFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
22738 if (value & FormatFeatureFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
22739 if (value & FormatFeatureFlagBits::eStorageTexelBufferAtomic) result += "StorageTexelBufferAtomic | ";
22740 if (value & FormatFeatureFlagBits::eVertexBuffer) result += "VertexBuffer | ";
22741 if (value & FormatFeatureFlagBits::eColorAttachment) result += "ColorAttachment | ";
22742 if (value & FormatFeatureFlagBits::eColorAttachmentBlend) result += "ColorAttachmentBlend | ";
22743 if (value & FormatFeatureFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
22744 if (value & FormatFeatureFlagBits::eBlitSrc) result += "BlitSrc | ";
22745 if (value & FormatFeatureFlagBits::eBlitDst) result += "BlitDst | ";
22746 if (value & FormatFeatureFlagBits::eSampledImageFilterLinear) result += "SampledImageFilterLinear | ";
22747 if (value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG) result += "SampledImageFilterCubicIMG | ";
Mark Youngb5f087a2017-01-19 21:10:49 -070022748 if (value & FormatFeatureFlagBits::eTransferSrcKHR) result += "TransferSrcKHR | ";
22749 if (value & FormatFeatureFlagBits::eTransferDstKHR) result += "TransferDstKHR | ";
Lenny Komowb0a17f22016-08-11 11:23:15 -060022750 return "{" + result.substr(0, result.size() - 3) + "}";
22751 }
22752
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022753 VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022754 {
22755 switch (value)
22756 {
22757 case QueryControlFlagBits::ePrecise: return "Precise";
22758 default: return "invalid";
22759 }
22760 }
22761
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022762 VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022763 {
22764 if (!value) return "{}";
22765 std::string result;
22766 if (value & QueryControlFlagBits::ePrecise) result += "Precise | ";
22767 return "{" + result.substr(0, result.size() - 3) + "}";
22768 }
22769
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022770 VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022771 {
22772 switch (value)
22773 {
22774 case QueryResultFlagBits::e64: return "64";
22775 case QueryResultFlagBits::eWait: return "Wait";
22776 case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
22777 case QueryResultFlagBits::ePartial: return "Partial";
22778 default: return "invalid";
22779 }
22780 }
22781
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022782 VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022783 {
22784 if (!value) return "{}";
22785 std::string result;
22786 if (value & QueryResultFlagBits::e64) result += "64 | ";
22787 if (value & QueryResultFlagBits::eWait) result += "Wait | ";
22788 if (value & QueryResultFlagBits::eWithAvailability) result += "WithAvailability | ";
22789 if (value & QueryResultFlagBits::ePartial) result += "Partial | ";
22790 return "{" + result.substr(0, result.size() - 3) + "}";
22791 }
22792
Mark Lobodzinski7e418a42016-12-12 09:44:34 -070022793 VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value)
Lenny Komowb0a17f22016-08-11 11:23:15 -060022794 {
22795 switch (value)
22796 {
22797 case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
22798 case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
22799 case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
22800 default: return "invalid";
22801 }
22802 }
22803
22804  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value)
22805  {
22806 if (!value) return "{}";
22807 std::string result;
22808 if (value & CommandBufferUsageFlagBits::eOneTimeSubmit) result += "OneTimeSubmit | ";
22809 if (value & CommandBufferUsageFlagBits::eRenderPassContinue) result += "RenderPassContinue | ";
22810 if (value & CommandBufferUsageFlagBits::eSimultaneousUse) result += "SimultaneousUse | ";
22811 return "{" + result.substr(0, result.size() - 3) + "}";
22812 }
22813
22814  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value)
22815  {
22816 switch (value)
22817 {
22818 case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
22819 case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
22820 case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
22821 case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
22822 case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
22823 case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
22824 case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
22825 case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
22826 case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
22827 case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations";
22828 case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
22829 default: return "invalid";
22830 }
22831 }
22832
22833  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value)
22834  {
22835 if (!value) return "{}";
22836 std::string result;
22837 if (value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices) result += "InputAssemblyVertices | ";
22838 if (value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) result += "InputAssemblyPrimitives | ";
22839 if (value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations) result += "VertexShaderInvocations | ";
22840 if (value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) result += "GeometryShaderInvocations | ";
22841 if (value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) result += "GeometryShaderPrimitives | ";
22842 if (value & QueryPipelineStatisticFlagBits::eClippingInvocations) result += "ClippingInvocations | ";
22843 if (value & QueryPipelineStatisticFlagBits::eClippingPrimitives) result += "ClippingPrimitives | ";
22844 if (value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) result += "FragmentShaderInvocations | ";
22845 if (value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) result += "TessellationControlShaderPatches | ";
22846 if (value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) result += "TessellationEvaluationShaderInvocations | ";
22847 if (value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations) result += "ComputeShaderInvocations | ";
22848 return "{" + result.substr(0, result.size() - 3) + "}";
22849 }
22850
22851  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value)
22852  {
22853 switch (value)
22854 {
22855 case ImageAspectFlagBits::eColor: return "Color";
22856 case ImageAspectFlagBits::eDepth: return "Depth";
22857 case ImageAspectFlagBits::eStencil: return "Stencil";
22858 case ImageAspectFlagBits::eMetadata: return "Metadata";
22859 default: return "invalid";
22860 }
22861 }
22862
22863  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value)
22864  {
22865 if (!value) return "{}";
22866 std::string result;
22867 if (value & ImageAspectFlagBits::eColor) result += "Color | ";
22868 if (value & ImageAspectFlagBits::eDepth) result += "Depth | ";
22869 if (value & ImageAspectFlagBits::eStencil) result += "Stencil | ";
22870 if (value & ImageAspectFlagBits::eMetadata) result += "Metadata | ";
22871 return "{" + result.substr(0, result.size() - 3) + "}";
22872 }
22873
22874  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value)
22875  {
22876 switch (value)
22877 {
22878 case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
22879 case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
22880 case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
22881 default: return "invalid";
22882 }
22883 }
22884
22885  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value)
22886  {
22887 if (!value) return "{}";
22888 std::string result;
22889 if (value & SparseImageFormatFlagBits::eSingleMiptail) result += "SingleMiptail | ";
22890 if (value & SparseImageFormatFlagBits::eAlignedMipSize) result += "AlignedMipSize | ";
22891 if (value & SparseImageFormatFlagBits::eNonstandardBlockSize) result += "NonstandardBlockSize | ";
22892 return "{" + result.substr(0, result.size() - 3) + "}";
22893 }
22894
22895  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value)
22896  {
22897 switch (value)
22898 {
22899 case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
22900 default: return "invalid";
22901 }
22902 }
22903
22904  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value)
22905  {
22906 if (!value) return "{}";
22907 std::string result;
22908 if (value & SparseMemoryBindFlagBits::eMetadata) result += "Metadata | ";
22909 return "{" + result.substr(0, result.size() - 3) + "}";
22910 }
22911
22912  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value)
22913  {
22914 switch (value)
22915 {
22916 case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
22917 case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
22918 case PipelineStageFlagBits::eVertexInput: return "VertexInput";
22919 case PipelineStageFlagBits::eVertexShader: return "VertexShader";
22920 case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
22921 case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
22922 case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
22923 case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
22924 case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
22925 case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
22926 case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
22927 case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
22928 case PipelineStageFlagBits::eTransfer: return "Transfer";
22929 case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
22930 case PipelineStageFlagBits::eHost: return "Host";
22931 case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
22932 case PipelineStageFlagBits::eAllCommands: return "AllCommands";
22933      case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX";
22934      default: return "invalid";
22935 }
22936 }
22937
22938  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value)
22939  {
22940 if (!value) return "{}";
22941 std::string result;
22942 if (value & PipelineStageFlagBits::eTopOfPipe) result += "TopOfPipe | ";
22943 if (value & PipelineStageFlagBits::eDrawIndirect) result += "DrawIndirect | ";
22944 if (value & PipelineStageFlagBits::eVertexInput) result += "VertexInput | ";
22945 if (value & PipelineStageFlagBits::eVertexShader) result += "VertexShader | ";
22946 if (value & PipelineStageFlagBits::eTessellationControlShader) result += "TessellationControlShader | ";
22947 if (value & PipelineStageFlagBits::eTessellationEvaluationShader) result += "TessellationEvaluationShader | ";
22948 if (value & PipelineStageFlagBits::eGeometryShader) result += "GeometryShader | ";
22949 if (value & PipelineStageFlagBits::eFragmentShader) result += "FragmentShader | ";
22950 if (value & PipelineStageFlagBits::eEarlyFragmentTests) result += "EarlyFragmentTests | ";
22951 if (value & PipelineStageFlagBits::eLateFragmentTests) result += "LateFragmentTests | ";
22952 if (value & PipelineStageFlagBits::eColorAttachmentOutput) result += "ColorAttachmentOutput | ";
22953 if (value & PipelineStageFlagBits::eComputeShader) result += "ComputeShader | ";
22954 if (value & PipelineStageFlagBits::eTransfer) result += "Transfer | ";
22955 if (value & PipelineStageFlagBits::eBottomOfPipe) result += "BottomOfPipe | ";
22956 if (value & PipelineStageFlagBits::eHost) result += "Host | ";
22957 if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | ";
22958 if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | ";
22959    if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | ";
22960    return "{" + result.substr(0, result.size() - 3) + "}";
22961 }
22962
22963  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value)
22964  {
22965 switch (value)
22966 {
22967 case CommandPoolCreateFlagBits::eTransient: return "Transient";
22968 case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
22969 default: return "invalid";
22970 }
22971 }
22972
22973  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value)
22974  {
22975 if (!value) return "{}";
22976 std::string result;
22977 if (value & CommandPoolCreateFlagBits::eTransient) result += "Transient | ";
22978 if (value & CommandPoolCreateFlagBits::eResetCommandBuffer) result += "ResetCommandBuffer | ";
22979 return "{" + result.substr(0, result.size() - 3) + "}";
22980 }
22981
22982  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value)
22983  {
22984 switch (value)
22985 {
22986 case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
22987 default: return "invalid";
22988 }
22989 }
22990
22991  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value)
22992  {
22993 if (!value) return "{}";
22994 std::string result;
22995 if (value & CommandPoolResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
22996 return "{" + result.substr(0, result.size() - 3) + "}";
22997 }
22998
22999  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value)
23000  {
23001 switch (value)
23002 {
23003 case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
23004 default: return "invalid";
23005 }
23006 }
23007
23008  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value)
23009  {
23010 if (!value) return "{}";
23011 std::string result;
23012 if (value & CommandBufferResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
23013 return "{" + result.substr(0, result.size() - 3) + "}";
23014 }
23015
23016  VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value)
23017  {
23018 switch (value)
23019 {
23020 case SampleCountFlagBits::e1: return "1";
23021 case SampleCountFlagBits::e2: return "2";
23022 case SampleCountFlagBits::e4: return "4";
23023 case SampleCountFlagBits::e8: return "8";
23024 case SampleCountFlagBits::e16: return "16";
23025 case SampleCountFlagBits::e32: return "32";
23026 case SampleCountFlagBits::e64: return "64";
23027 default: return "invalid";
23028 }
23029 }
23030
23031  VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value)
23032  {
23033 if (!value) return "{}";
23034 std::string result;
23035 if (value & SampleCountFlagBits::e1) result += "1 | ";
23036 if (value & SampleCountFlagBits::e2) result += "2 | ";
23037 if (value & SampleCountFlagBits::e4) result += "4 | ";
23038 if (value & SampleCountFlagBits::e8) result += "8 | ";
23039 if (value & SampleCountFlagBits::e16) result += "16 | ";
23040 if (value & SampleCountFlagBits::e32) result += "32 | ";
23041 if (value & SampleCountFlagBits::e64) result += "64 | ";
23042 return "{" + result.substr(0, result.size() - 3) + "}";
23043 }
23044
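  // Editorial note, not generated output: a quick check of the trimming logic
  // used above, assuming the usual flag composition via operator|:
  //
  //   vk::SampleCountFlags counts = vk::SampleCountFlagBits::e1 | vk::SampleCountFlagBits::e4;
  //   // vk::to_string(counts) returns "{1 | 4}" — the trailing " | " is removed
  //   // by result.substr(0, result.size() - 3) before the braces are added.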
23045  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value)
23046  {
23047 switch (value)
23048 {
23049 case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
23050 default: return "invalid";
23051 }
23052 }
23053
23054  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value)
23055  {
23056 if (!value) return "{}";
23057 std::string result;
23058 if (value & AttachmentDescriptionFlagBits::eMayAlias) result += "MayAlias | ";
23059 return "{" + result.substr(0, result.size() - 3) + "}";
23060 }
23061
23062  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value)
23063  {
23064 switch (value)
23065 {
23066 case StencilFaceFlagBits::eFront: return "Front";
23067 case StencilFaceFlagBits::eBack: return "Back";
23068 case StencilFaceFlagBits::eVkStencilFrontAndBack: return "VkStencilFrontAndBack";
23069 default: return "invalid";
23070 }
23071 }
23072
23073  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value)
23074  {
23075 if (!value) return "{}";
23076 std::string result;
23077 if (value & StencilFaceFlagBits::eFront) result += "Front | ";
23078 if (value & StencilFaceFlagBits::eBack) result += "Back | ";
23079 if (value & StencilFaceFlagBits::eVkStencilFrontAndBack) result += "VkStencilFrontAndBack | ";
23080 return "{" + result.substr(0, result.size() - 3) + "}";
23081 }
23082
23083  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value)
23084  {
23085 switch (value)
23086 {
23087 case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
23088 default: return "invalid";
23089 }
23090 }
23091
23092  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value)
23093  {
23094 if (!value) return "{}";
23095 std::string result;
23096 if (value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet) result += "FreeDescriptorSet | ";
23097 return "{" + result.substr(0, result.size() - 3) + "}";
23098 }
23099
23100  VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value)
23101  {
23102 switch (value)
23103 {
23104 case DependencyFlagBits::eByRegion: return "ByRegion";
23105 default: return "invalid";
23106 }
23107 }
23108
23109  VULKAN_HPP_INLINE std::string to_string(DependencyFlags value)
23110  {
23111 if (!value) return "{}";
23112 std::string result;
23113 if (value & DependencyFlagBits::eByRegion) result += "ByRegion | ";
23114 return "{" + result.substr(0, result.size() - 3) + "}";
23115 }
23116
23117  VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value)
23118  {
23119 switch (value)
23120 {
23121 case PresentModeKHR::eImmediate: return "Immediate";
23122 case PresentModeKHR::eMailbox: return "Mailbox";
23123 case PresentModeKHR::eFifo: return "Fifo";
23124 case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
23125 default: return "invalid";
23126 }
23127 }
23128
23129  VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value)
23130  {
23131 switch (value)
23132 {
23133 case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
23134      case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT";
23135 case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
23136 case ColorSpaceKHR::eScrgbLinearEXT: return "ScrgbLinearEXT";
23137 case ColorSpaceKHR::eScrgbNonlinearEXT: return "ScrgbNonlinearEXT";
23138 case ColorSpaceKHR::eDciP3LinearEXT: return "DciP3LinearEXT";
23139 case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
23140 case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
23141 case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
23142 case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
23143 case ColorSpaceKHR::eBt2020NonlinearEXT: return "Bt2020NonlinearEXT";
23144 case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
23145 case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
23146      default: return "invalid";
23147 }
23148 }
23149
23150  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
23151  {
23152 switch (value)
23153 {
23154 case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
23155 case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
23156 case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
23157 case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
23158 default: return "invalid";
23159 }
23160 }
23161
23162  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value)
23163  {
23164 if (!value) return "{}";
23165 std::string result;
23166 if (value & DisplayPlaneAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
23167 if (value & DisplayPlaneAlphaFlagBitsKHR::eGlobal) result += "Global | ";
23168 if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel) result += "PerPixel | ";
23169 if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) result += "PerPixelPremultiplied | ";
23170 return "{" + result.substr(0, result.size() - 3) + "}";
23171 }
23172
23173  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value)
23174  {
23175 switch (value)
23176 {
23177 case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
23178 case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
23179 case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
23180 case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
23181 default: return "invalid";
23182 }
23183 }
23184
23185  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value)
23186  {
23187 if (!value) return "{}";
23188 std::string result;
23189 if (value & CompositeAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
23190 if (value & CompositeAlphaFlagBitsKHR::ePreMultiplied) result += "PreMultiplied | ";
23191 if (value & CompositeAlphaFlagBitsKHR::ePostMultiplied) result += "PostMultiplied | ";
23192 if (value & CompositeAlphaFlagBitsKHR::eInherit) result += "Inherit | ";
23193 return "{" + result.substr(0, result.size() - 3) + "}";
23194 }
23195
23196  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value)
23197  {
23198 switch (value)
23199 {
23200 case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
23201 case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
23202 case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
23203 case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
23204 case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
23205 case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
23206 case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
23207 case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
23208 case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
23209 default: return "invalid";
23210 }
23211 }
23212
23213  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value)
23214  {
23215 if (!value) return "{}";
23216 std::string result;
23217 if (value & SurfaceTransformFlagBitsKHR::eIdentity) result += "Identity | ";
23218 if (value & SurfaceTransformFlagBitsKHR::eRotate90) result += "Rotate90 | ";
23219 if (value & SurfaceTransformFlagBitsKHR::eRotate180) result += "Rotate180 | ";
23220 if (value & SurfaceTransformFlagBitsKHR::eRotate270) result += "Rotate270 | ";
23221 if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirror) result += "HorizontalMirror | ";
23222 if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) result += "HorizontalMirrorRotate90 | ";
23223 if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) result += "HorizontalMirrorRotate180 | ";
23224 if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) result += "HorizontalMirrorRotate270 | ";
23225 if (value & SurfaceTransformFlagBitsKHR::eInherit) result += "Inherit | ";
23226 return "{" + result.substr(0, result.size() - 3) + "}";
23227 }
23228
23229  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value)
23230  {
23231 switch (value)
23232 {
23233 case DebugReportFlagBitsEXT::eInformation: return "Information";
23234 case DebugReportFlagBitsEXT::eWarning: return "Warning";
23235 case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
23236 case DebugReportFlagBitsEXT::eError: return "Error";
23237 case DebugReportFlagBitsEXT::eDebug: return "Debug";
23238 default: return "invalid";
23239 }
23240 }
23241
23242  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value)
23243  {
23244 if (!value) return "{}";
23245 std::string result;
23246 if (value & DebugReportFlagBitsEXT::eInformation) result += "Information | ";
23247 if (value & DebugReportFlagBitsEXT::eWarning) result += "Warning | ";
23248 if (value & DebugReportFlagBitsEXT::ePerformanceWarning) result += "PerformanceWarning | ";
23249 if (value & DebugReportFlagBitsEXT::eError) result += "Error | ";
23250 if (value & DebugReportFlagBitsEXT::eDebug) result += "Debug | ";
23251 return "{" + result.substr(0, result.size() - 3) + "}";
23252 }
23253
23254  VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value)
23255  {
23256 switch (value)
23257 {
23258 case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
23259 case DebugReportObjectTypeEXT::eInstance: return "Instance";
23260 case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
23261 case DebugReportObjectTypeEXT::eDevice: return "Device";
23262 case DebugReportObjectTypeEXT::eQueue: return "Queue";
23263 case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
23264 case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
23265 case DebugReportObjectTypeEXT::eFence: return "Fence";
23266 case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
23267 case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
23268 case DebugReportObjectTypeEXT::eImage: return "Image";
23269 case DebugReportObjectTypeEXT::eEvent: return "Event";
23270 case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
23271 case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
23272 case DebugReportObjectTypeEXT::eImageView: return "ImageView";
23273 case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
23274 case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
23275 case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
23276 case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
23277 case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
23278 case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
23279 case DebugReportObjectTypeEXT::eSampler: return "Sampler";
23280 case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
23281 case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
23282 case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
23283 case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
23284 case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr";
23285 case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr";
23286 case DebugReportObjectTypeEXT::eDebugReport: return "DebugReport";
23287      case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr";
23288 case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr";
23289 case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx";
23290 case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx";
23291      default: return "invalid";
23292 }
23293 }
23294
23295  VULKAN_HPP_INLINE std::string to_string(DebugReportErrorEXT value)
23296  {
23297 switch (value)
23298 {
23299 case DebugReportErrorEXT::eNone: return "None";
23300 case DebugReportErrorEXT::eCallbackRef: return "CallbackRef";
23301 default: return "invalid";
23302 }
23303 }
23304
23305  VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value)
23306  {
23307 switch (value)
23308 {
23309 case RasterizationOrderAMD::eStrict: return "Strict";
23310 case RasterizationOrderAMD::eRelaxed: return "Relaxed";
23311 default: return "invalid";
23312 }
23313 }
23314
23315  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
23316  {
23317 switch (value)
23318 {
23319 case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
23320 case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
23321 case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
23322 case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
23323 default: return "invalid";
23324 }
23325 }
23326
23327  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
23328  {
23329 if (!value) return "{}";
23330 std::string result;
23331 if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) result += "OpaqueWin32 | ";
23332 if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
23333 if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) result += "D3D11Image | ";
23334 if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) result += "D3D11ImageKmt | ";
23335 return "{" + result.substr(0, result.size() - 3) + "}";
23336 }
23337
23338  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
23339  {
23340 switch (value)
23341 {
23342 case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
23343 case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
23344 case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
23345 default: return "invalid";
23346 }
23347 }
23348
23349  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value)
23350  {
23351 if (!value) return "{}";
23352 std::string result;
23353 if (value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) result += "DedicatedOnly | ";
23354 if (value & ExternalMemoryFeatureFlagBitsNV::eExportable) result += "Exportable | ";
23355 if (value & ExternalMemoryFeatureFlagBitsNV::eImportable) result += "Importable | ";
23356 return "{" + result.substr(0, result.size() - 3) + "}";
23357 }
23358
23359  VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value)
23360  {
23361 switch (value)
23362 {
23363 case ValidationCheckEXT::eAll: return "All";
23364 default: return "invalid";
23365 }
23366 }
23367
23368  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value)
23369 {
23370 switch (value)
23371 {
23372 case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences";
23373 case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences";
23374 case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions";
23375 case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences";
23376 default: return "invalid";
23377 }
23378 }
23379
23380 VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value)
23381 {
23382 if (!value) return "{}";
23383 std::string result;
23384 if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | ";
23385 if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | ";
23386 if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | ";
23387 if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | ";
23388 return "{" + result.substr(0, result.size() - 3) + "}";
23389 }
23390
23391 VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value)
23392 {
23393 switch (value)
23394 {
23395 case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics";
23396 case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute";
23397 default: return "invalid";
23398 }
23399 }
23400
23401 VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value)
23402 {
23403 if (!value) return "{}";
23404 std::string result;
23405 if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | ";
23406 if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | ";
23407 return "{" + result.substr(0, result.size() - 3) + "}";
23408 }
23409
23410 VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value)
23411 {
23412 switch (value)
23413 {
23414 case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline: return "VkIndirectCommandsTokenPipeline";
23415 case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDescriptorSet: return "VkIndirectCommandsTokenDescriptorSet";
23416 case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenIndexBuffer: return "VkIndirectCommandsTokenIndexBuffer";
23417 case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenVertexBuffer: return "VkIndirectCommandsTokenVertexBuffer";
23418 case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPushConstant: return "VkIndirectCommandsTokenPushConstant";
23419 case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDrawIndexed: return "VkIndirectCommandsTokenDrawIndexed";
23420 case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDraw: return "VkIndirectCommandsTokenDraw";
23421 case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDispatch: return "VkIndirectCommandsTokenDispatch";
23422 default: return "invalid";
23423 }
23424 }
23425
23426 VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value)
23427 {
23428 switch (value)
23429 {
23430 case ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet: return "VkObjectEntryDescriptorSet";
23431 case ObjectEntryTypeNVX::eVkObjectEntryPipeline: return "VkObjectEntryPipeline";
23432 case ObjectEntryTypeNVX::eVkObjectEntryIndexBuffer: return "VkObjectEntryIndexBuffer";
23433 case ObjectEntryTypeNVX::eVkObjectEntryVertexBuffer: return "VkObjectEntryVertexBuffer";
23434 case ObjectEntryTypeNVX::eVkObjectEntryPushConstant: return "VkObjectEntryPushConstant";
23435 default: return "invalid";
23436 }
23437 }
23438
23439  VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagBitsEXT value)
23440 {
23441 switch (value)
23442 {
23443 case SurfaceCounterFlagBitsEXT::eVblankExt: return "VblankExt";
23444 default: return "invalid";
23445 }
23446 }
23447
23448 VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagsEXT value)
23449 {
23450 if (!value) return "{}";
23451 std::string result;
23452 if (value & SurfaceCounterFlagBitsEXT::eVblankExt) result += "VblankExt | ";
23453 return "{" + result.substr(0, result.size() - 3) + "}";
23454 }
23455
23456 VULKAN_HPP_INLINE std::string to_string(DisplayPowerStateEXT value)
23457 {
23458 switch (value)
23459 {
23460 case DisplayPowerStateEXT::eOff: return "Off";
23461 case DisplayPowerStateEXT::eSuspend: return "Suspend";
23462 case DisplayPowerStateEXT::eOn: return "On";
23463 default: return "invalid";
23464 }
23465 }
23466
23467 VULKAN_HPP_INLINE std::string to_string(DeviceEventTypeEXT value)
23468 {
23469 switch (value)
23470 {
23471 case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
23472 default: return "invalid";
23473 }
23474 }
23475
23476 VULKAN_HPP_INLINE std::string to_string(DisplayEventTypeEXT value)
23477 {
23478 switch (value)
23479 {
23480 case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
23481 default: return "invalid";
23482 }
23483 }
23484
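  // Editorial note, not generated output: the single-value overloads above return
  // the bare enumerant name (no braces) and "invalid" for values outside the
  // switch. A hedged example:
  //
  //   vk::PresentModeKHR mode = vk::PresentModeKHR::eMailbox;
  //   assert(vk::to_string(mode) == "Mailbox");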
23485} // namespace vk
23486
23487#endif