// Copyright (c) 2015-2017 The Khronos Group Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and/or associated documentation files (the
// "Materials"), to deal in the Materials without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Materials, and to
// permit persons to whom the Materials are furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Materials.
//
// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.

// This header is generated from the Khronos Vulkan XML API Registry.

#ifndef VULKAN_HPP
#define VULKAN_HPP

#include <algorithm>
#include <array>
#include <cassert>
#include <cstdint>
#include <cstring>
#include <initializer_list>
#include <string>
#include <system_error>
#include <tuple>
#include <type_traits>
#include <vulkan/vulkan.h>
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
# include <memory>
# include <vector>
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

static_assert( VK_HEADER_VERSION == 40, "Wrong VK_HEADER_VERSION!" );

// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
#  define VULKAN_HPP_TYPESAFE_CONVERSION
# endif
#endif
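
// Editorial note: on the 64-bit platforms matched above, VULKAN_HPP_TYPESAFE_CONVERSION is defined
// automatically, which enables the implicit conversions between the native Vk* handles and the
// handle wrapper classes below. A minimal sketch of opting in on a 32-bit build (illustrative
// only; the include path is an assumption):
//
//   #define VULKAN_HPP_TYPESAFE_CONVERSION
//   #include <vulkan/vulkan.hpp>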

#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
# if defined(__clang__)
#  if __has_feature(cxx_unrestricted_unions)
#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#  endif
# elif defined(__GNUC__)
#  define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
#  if 40600 <= GCC_VERSION
#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#  endif
# elif defined(_MSC_VER)
#  if 1900 <= _MSC_VER
#   define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
#  endif
# endif
#endif


#if !defined(VULKAN_HPP_INLINE)
# if defined(__clang__)
#  if __has_attribute(always_inline)
#   define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
#  else
#   define VULKAN_HPP_INLINE inline
#  endif
# elif defined(__GNUC__)
#  define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
# elif defined(_MSC_VER)
#  define VULKAN_HPP_INLINE __forceinline
# else
#  define VULKAN_HPP_INLINE inline
# endif
#endif

namespace vk
{
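  // The generic FlagTraits below reports no known bits (allFlags = 0); Flags<BitType>::operator~
  // masks against it, so bitwise complement is only meaningful for bit types that provide a
  // specialization with their valid bits.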
  template <typename FlagBitsType> struct FlagTraits
  {
    enum { allFlags = 0 };
  };

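  // Flags<BitType, MaskType> is a type-safe bitmask: it stores the underlying VkFlags value but
  // only accepts bits of the matching *FlagBits enum, so unrelated flag types cannot be mixed.
  // A minimal usage sketch (illustrative only; SomeFlagBits is a hypothetical bit enum):
  //
  //   vk::Flags<SomeFlagBits> flags( SomeFlagBits::eA );
  //   flags |= SomeFlagBits::eB;                        // compose bits of the same enum only
  //   if ( flags & SomeFlagBits::eA ) { /* bit set */ }
  //   VkFlags raw = static_cast<VkFlags>( flags );      // explicit conversion back to the C type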
Lenny Komowbed9b5c2016-08-11 11:23:15 -060095 template <typename BitType, typename MaskType = VkFlags>
96 class Flags
97 {
98 public:
99 Flags()
100 : m_mask(0)
101 {
102 }
103
104 Flags(BitType bit)
105 : m_mask(static_cast<MaskType>(bit))
106 {
107 }
108
109 Flags(Flags<BitType> const& rhs)
110 : m_mask(rhs.m_mask)
111 {
112 }
113
114 Flags<BitType> & operator=(Flags<BitType> const& rhs)
115 {
116 m_mask = rhs.m_mask;
117 return *this;
118 }
119
120 Flags<BitType> & operator|=(Flags<BitType> const& rhs)
121 {
122 m_mask |= rhs.m_mask;
123 return *this;
124 }
125
126 Flags<BitType> & operator&=(Flags<BitType> const& rhs)
127 {
128 m_mask &= rhs.m_mask;
129 return *this;
130 }
131
132 Flags<BitType> & operator^=(Flags<BitType> const& rhs)
133 {
134 m_mask ^= rhs.m_mask;
135 return *this;
136 }
137
138 Flags<BitType> operator|(Flags<BitType> const& rhs) const
139 {
140 Flags<BitType> result(*this);
141 result |= rhs;
142 return result;
143 }
144
145 Flags<BitType> operator&(Flags<BitType> const& rhs) const
146 {
147 Flags<BitType> result(*this);
148 result &= rhs;
149 return result;
150 }
151
152 Flags<BitType> operator^(Flags<BitType> const& rhs) const
153 {
154 Flags<BitType> result(*this);
155 result ^= rhs;
156 return result;
157 }
158
159 bool operator!() const
160 {
161 return !m_mask;
162 }
163
Mark Lobodzinski2d589822016-12-12 09:44:34 -0700164 Flags<BitType> operator~() const
165 {
166 Flags<BitType> result(*this);
167 result.m_mask ^= FlagTraits<BitType>::allFlags;
168 return result;
169 }
170
Lenny Komowbed9b5c2016-08-11 11:23:15 -0600171 bool operator==(Flags<BitType> const& rhs) const
172 {
173 return m_mask == rhs.m_mask;
174 }
175
176 bool operator!=(Flags<BitType> const& rhs) const
177 {
178 return m_mask != rhs.m_mask;
179 }
180
181 explicit operator bool() const
182 {
183 return !!m_mask;
184 }
185
186 explicit operator MaskType() const
187 {
188 return m_mask;
189 }
190
191 private:
192 MaskType m_mask;
193 };
194
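  // Free operators so that a bare bit on the left-hand side also composes with a Flags value; the
  // concrete flag types generated below additionally get an operator| for two bare bits.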
  template <typename BitType>
  Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags)
  {
    return flags | bit;
  }

  template <typename BitType>
  Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags)
  {
    return flags & bit;
  }

  template <typename BitType>
  Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags)
  {
    return flags ^ bit;
  }

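  // Optional<RefType> models an optional reference parameter: it holds either the address of a
  // RefType or nullptr, so enhanced-mode functions can accept "no value" without extra overloads.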
  template <typename RefType>
  class Optional
  {
  public:
    Optional(RefType & reference) { m_ptr = &reference; }
    Optional(RefType * ptr) { m_ptr = ptr; }
    Optional(std::nullptr_t) { m_ptr = nullptr; }

    operator RefType*() const { return m_ptr; }
    RefType const* operator->() const { return m_ptr; }
    explicit operator bool() const { return !!m_ptr; }

  private:
    RefType *m_ptr;
  };

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
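  // ArrayProxy<T> is a non-owning (count, pointer) view used by the enhanced-mode API, so one
  // parameter can accept nullptr, a single element, a std::array, a std::vector, or an
  // initializer list. A minimal sketch (illustrative only; the function signature is hypothetical):
  //
  //   void setBlendConstants( vk::ArrayProxy<const float> constants );
  //   setBlendConstants( { 0.0f, 0.0f, 0.0f, 1.0f } );   // from an initializer list
  //   std::vector<float> v{ 1.0f, 2.0f, 3.0f, 4.0f };
  //   setBlendConstants( v );                            // from a vector, no copy taken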
  template <typename T>
  class ArrayProxy
  {
  public:
    ArrayProxy(std::nullptr_t)
      : m_count(0)
      , m_ptr(nullptr)
    {}

    ArrayProxy(T & ptr)
      : m_count(1)
      , m_ptr(&ptr)
    {}

    ArrayProxy(uint32_t count, T * ptr)
      : m_count(count)
      , m_ptr(ptr)
    {}

    template <size_t N>
    ArrayProxy(std::array<typename std::remove_const<T>::type, N> & data)
      : m_count(N)
      , m_ptr(data.data())
    {}

    template <size_t N>
    ArrayProxy(std::array<typename std::remove_const<T>::type, N> const& data)
      : m_count(N)
      , m_ptr(data.data())
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> & data)
      : m_count(static_cast<uint32_t>(data.size()))
      , m_ptr(data.data())
    {}

    template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
    ArrayProxy(std::vector<typename std::remove_const<T>::type, Allocator> const& data)
      : m_count(static_cast<uint32_t>(data.size()))
      , m_ptr(data.data())
    {}

    ArrayProxy(std::initializer_list<T> const& data)
      : m_count(static_cast<uint32_t>(data.end() - data.begin()))
      , m_ptr(data.begin())
    {}

    const T * begin() const
    {
      return m_ptr;
    }

    const T * end() const
    {
      return m_ptr + m_count;
    }

    const T & front() const
    {
      assert(m_count && m_ptr);
      return *m_ptr;
    }

    const T & back() const
    {
      assert(m_count && m_ptr);
      return *(m_ptr + m_count - 1);
    }

    bool empty() const
    {
      return (m_count == 0);
    }

    uint32_t size() const
    {
      return m_count;
    }

    T * data() const
    {
      return m_ptr;
    }

  private:
    uint32_t m_count;
    T * m_ptr;
  };
#endif


#if defined(VULKAN_HPP_NO_EXCEPTIONS) && !defined(VULKAN_HPP_NO_SMART_HANDLE)
# define VULKAN_HPP_NO_SMART_HANDLE
#endif

#ifndef VULKAN_HPP_NO_SMART_HANDLE
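  // UniqueHandle<Type, Deleter> provides unique_ptr-style ownership for the handle wrappers: it is
  // move-only and invokes its Deleter on a non-null handle when destroyed or reset. Note the block
  // above disables smart handles automatically when VULKAN_HPP_NO_EXCEPTIONS is defined.
  // A minimal sketch (illustrative only; BufferDeleter is a hypothetical callable):
  //
  //   struct BufferDeleter { void operator()( vk::Buffer buffer ) const { /* destroy buffer */ } };
  //   vk::UniqueHandle<vk::Buffer, BufferDeleter> unique( buffer, BufferDeleter() );
  //   vk::Buffer raw = unique.get();   // ownership stays with 'unique'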
  template <typename Type, typename Deleter>
  class UniqueHandle
  {
  public:
    explicit UniqueHandle( Type const& value = Type(), Deleter const& deleter = Deleter() )
      : m_value( value )
      , m_deleter( deleter )
    {}

    UniqueHandle( UniqueHandle const& ) = delete;

    UniqueHandle( UniqueHandle && other )
      : m_value( other.release() )
      , m_deleter( std::move( other.m_deleter ) )
    {}

    ~UniqueHandle()
    {
      destroy();
    }

    UniqueHandle & operator=( UniqueHandle const& ) = delete;

    UniqueHandle & operator=( UniqueHandle && other )
    {
      reset( other.release() );
      m_deleter = std::move( other.m_deleter );
      return *this;
    }

    explicit operator bool() const
    {
      return m_value.operator bool();
    }

    Type const* operator->() const
    {
      return &m_value;
    }

    Type const& operator*() const
    {
      return m_value;
    }

    Type get() const
    {
      return m_value;
    }

    Deleter & getDeleter()
    {
      return m_deleter;
    }

    Deleter const& getDeleter() const
    {
      return m_deleter;
    }

    void reset( Type const& value = Type() )
    {
      if ( m_value != value )
      {
        destroy();
        m_value = value;
      }
    }

    Type release()
    {
      Type value = m_value;
      m_value = nullptr;
      return value;
    }

    void swap( UniqueHandle<Type, Deleter> & rhs )
    {
      std::swap(m_value, rhs.m_value);
      std::swap(m_deleter, rhs.m_deleter);
    }

  private:
    void destroy()
    {
      if ( m_value )
      {
        m_deleter( m_value );
      }
    }

  private:
    Type m_value;
    Deleter m_deleter;
  };

  template <typename Type, typename Deleter>
  VULKAN_HPP_INLINE void swap( UniqueHandle<Type,Deleter> & lhs, UniqueHandle<Type,Deleter> & rhs )
  {
    lhs.swap( rhs );
  }
#endif

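  // Result mirrors VkResult one-to-one; the createResultValue helpers further below turn
  // unexpected codes into std::system_error exceptions, or hand them back directly when
  // VULKAN_HPP_NO_EXCEPTIONS is defined.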
  enum class Result
  {
    eSuccess = VK_SUCCESS,
    eNotReady = VK_NOT_READY,
    eTimeout = VK_TIMEOUT,
    eEventSet = VK_EVENT_SET,
    eEventReset = VK_EVENT_RESET,
    eIncomplete = VK_INCOMPLETE,
    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR
  };

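  // to_string is intended for diagnostics and error messages; unknown values map to "invalid".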
  VULKAN_HPP_INLINE std::string to_string(Result value)
  {
    switch (value)
    {
    case Result::eSuccess: return "Success";
    case Result::eNotReady: return "NotReady";
    case Result::eTimeout: return "Timeout";
    case Result::eEventSet: return "EventSet";
    case Result::eEventReset: return "EventReset";
    case Result::eIncomplete: return "Incomplete";
    case Result::eErrorOutOfHostMemory: return "ErrorOutOfHostMemory";
    case Result::eErrorOutOfDeviceMemory: return "ErrorOutOfDeviceMemory";
    case Result::eErrorInitializationFailed: return "ErrorInitializationFailed";
    case Result::eErrorDeviceLost: return "ErrorDeviceLost";
    case Result::eErrorMemoryMapFailed: return "ErrorMemoryMapFailed";
    case Result::eErrorLayerNotPresent: return "ErrorLayerNotPresent";
    case Result::eErrorExtensionNotPresent: return "ErrorExtensionNotPresent";
    case Result::eErrorFeatureNotPresent: return "ErrorFeatureNotPresent";
    case Result::eErrorIncompatibleDriver: return "ErrorIncompatibleDriver";
    case Result::eErrorTooManyObjects: return "ErrorTooManyObjects";
    case Result::eErrorFormatNotSupported: return "ErrorFormatNotSupported";
    case Result::eErrorFragmentedPool: return "ErrorFragmentedPool";
    case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR";
    case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR";
    case Result::eSuboptimalKHR: return "SuboptimalKHR";
    case Result::eErrorOutOfDateKHR: return "ErrorOutOfDateKHR";
    case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR";
    case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT";
    case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV";
    case Result::eErrorOutOfPoolMemoryKHR: return "ErrorOutOfPoolMemoryKHR";
    default: return "invalid";
    }
  }

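  // Result is wired into <system_error>: errorCategory()/make_error_code below plus the
  // std::is_error_code_enum specialization allow comparing a std::error_code against vk::Result,
  // and createResultValue throws std::system_error carrying exactly that code.
  // A minimal sketch of handling such a failure (illustrative only):
  //
  //   try { /* call an enhanced-mode function */ }
  //   catch ( std::system_error const& e )
  //   {
  //     if ( e.code() == vk::Result::eErrorOutOfHostMemory ) { /* handle out-of-memory */ }
  //   }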
#if defined(_MSC_VER) && (_MSC_VER == 1800)
# define noexcept _NOEXCEPT
#endif

  class ErrorCategoryImpl : public std::error_category
  {
  public:
    virtual const char* name() const noexcept override { return "vk::Result"; }
    virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
  };

#if defined(_MSC_VER) && (_MSC_VER == 1800)
# undef noexcept
#endif

  VULKAN_HPP_INLINE const std::error_category& errorCategory()
  {
    static ErrorCategoryImpl instance;
    return instance;
  }

  VULKAN_HPP_INLINE std::error_code make_error_code(Result e)
  {
    return std::error_code(static_cast<int>(e), errorCategory());
  }

  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e)
  {
    return std::error_condition(static_cast<int>(e), errorCategory());
  }

} // namespace vk

namespace std
{
  template <>
  struct is_error_code_enum<vk::Result> : public true_type
  {};
}

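// ResultValue pairs a Result with a returned value; it is used on the VULKAN_HPP_NO_EXCEPTIONS
// path and for calls with more than one success code. ResultValueType<T>::type selects between
// plain T and ResultValue<T> depending on whether exceptions are enabled.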
namespace vk
{
  template <typename T>
  struct ResultValue
  {
    ResultValue( Result r, T & v )
      : result( r )
      , value( v )
    {}

    Result result;
    T value;

    operator std::tuple<Result&, T&>() { return std::tuple<Result&, T&>(result, value); }
  };

  template <typename T>
  struct ResultValueType
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    typedef ResultValue<T> type;
#else
    typedef T type;
#endif
  };

  template <> struct ResultValueType<void>
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    typedef Result type;
#else
    typedef void type;
#endif
  };

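  // createResultValue is the common return path of the generated functions: with exceptions
  // enabled it throws std::system_error for any code outside the expected success set and returns
  // the plain value; with VULKAN_HPP_NO_EXCEPTIONS it asserts instead and returns the Result or a
  // ResultValue<T>. The std::tuple conversion above also allows unpacking in the style of
  //
  //   std::tie( result, value ) = resultValueOfT;   // illustrative only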
  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    assert( result == Result::eSuccess );
    return result;
#else
    if ( result != Result::eSuccess )
    {
      throw std::system_error( result, message );
    }
#endif
  }

  template <typename T>
  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    assert( result == Result::eSuccess );
    return ResultValue<T>( result, data );
#else
    if ( result != Result::eSuccess )
    {
      throw std::system_error( result, message );
    }
    return data;
#endif
  }

  VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#else
    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
    {
      throw std::system_error( result, message );
    }
#endif
    return result;
  }

  template <typename T>
  VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    assert( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
#else
    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
    {
      throw std::system_error( result, message );
    }
#endif
    return ResultValue<T>( result, data );
  }

  using SampleMask = uint32_t;

  using Bool32 = uint32_t;

  using DeviceSize = uint64_t;

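  // Each Vk*FlagBits enum below is wrapped in the same pattern: a Flags<> alias plus an operator|
  // so that two bare bits combine into the flags type. Several of these enums are currently empty
  // placeholders reserved by the API.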
631 enum class FramebufferCreateFlagBits
632 {
633 };
634
635 using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits, VkFramebufferCreateFlags>;
636
  VULKAN_HPP_INLINE FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 )
  {
639 return FramebufferCreateFlags( bit0 ) | bit1;
640 }
641
642 enum class QueryPoolCreateFlagBits
643 {
644 };
645
646 using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits, VkQueryPoolCreateFlags>;
647
  VULKAN_HPP_INLINE QueryPoolCreateFlags operator|( QueryPoolCreateFlagBits bit0, QueryPoolCreateFlagBits bit1 )
  {
650 return QueryPoolCreateFlags( bit0 ) | bit1;
651 }
652
653 enum class RenderPassCreateFlagBits
654 {
655 };
656
657 using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits, VkRenderPassCreateFlags>;
658
  VULKAN_HPP_INLINE RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 )
  {
661 return RenderPassCreateFlags( bit0 ) | bit1;
662 }
663
664 enum class SamplerCreateFlagBits
665 {
666 };
667
668 using SamplerCreateFlags = Flags<SamplerCreateFlagBits, VkSamplerCreateFlags>;
669
  VULKAN_HPP_INLINE SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 )
  {
672 return SamplerCreateFlags( bit0 ) | bit1;
673 }
674
675 enum class PipelineLayoutCreateFlagBits
676 {
677 };
678
679 using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits, VkPipelineLayoutCreateFlags>;
680
  VULKAN_HPP_INLINE PipelineLayoutCreateFlags operator|( PipelineLayoutCreateFlagBits bit0, PipelineLayoutCreateFlagBits bit1 )
  {
683 return PipelineLayoutCreateFlags( bit0 ) | bit1;
684 }
685
686 enum class PipelineCacheCreateFlagBits
687 {
688 };
689
690 using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits, VkPipelineCacheCreateFlags>;
691
  VULKAN_HPP_INLINE PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 )
  {
694 return PipelineCacheCreateFlags( bit0 ) | bit1;
695 }
696
697 enum class PipelineDepthStencilStateCreateFlagBits
698 {
699 };
700
701 using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits, VkPipelineDepthStencilStateCreateFlags>;
702
  VULKAN_HPP_INLINE PipelineDepthStencilStateCreateFlags operator|( PipelineDepthStencilStateCreateFlagBits bit0, PipelineDepthStencilStateCreateFlagBits bit1 )
  {
705 return PipelineDepthStencilStateCreateFlags( bit0 ) | bit1;
706 }
707
708 enum class PipelineDynamicStateCreateFlagBits
709 {
710 };
711
712 using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits, VkPipelineDynamicStateCreateFlags>;
713
  VULKAN_HPP_INLINE PipelineDynamicStateCreateFlags operator|( PipelineDynamicStateCreateFlagBits bit0, PipelineDynamicStateCreateFlagBits bit1 )
  {
716 return PipelineDynamicStateCreateFlags( bit0 ) | bit1;
717 }
718
719 enum class PipelineColorBlendStateCreateFlagBits
720 {
721 };
722
723 using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits, VkPipelineColorBlendStateCreateFlags>;
724
  VULKAN_HPP_INLINE PipelineColorBlendStateCreateFlags operator|( PipelineColorBlendStateCreateFlagBits bit0, PipelineColorBlendStateCreateFlagBits bit1 )
  {
727 return PipelineColorBlendStateCreateFlags( bit0 ) | bit1;
728 }
729
730 enum class PipelineMultisampleStateCreateFlagBits
731 {
732 };
733
734 using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits, VkPipelineMultisampleStateCreateFlags>;
735
  VULKAN_HPP_INLINE PipelineMultisampleStateCreateFlags operator|( PipelineMultisampleStateCreateFlagBits bit0, PipelineMultisampleStateCreateFlagBits bit1 )
  {
738 return PipelineMultisampleStateCreateFlags( bit0 ) | bit1;
739 }
740
741 enum class PipelineRasterizationStateCreateFlagBits
742 {
743 };
744
745 using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits, VkPipelineRasterizationStateCreateFlags>;
746
  VULKAN_HPP_INLINE PipelineRasterizationStateCreateFlags operator|( PipelineRasterizationStateCreateFlagBits bit0, PipelineRasterizationStateCreateFlagBits bit1 )
  {
749 return PipelineRasterizationStateCreateFlags( bit0 ) | bit1;
750 }
751
752 enum class PipelineViewportStateCreateFlagBits
753 {
754 };
755
756 using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits, VkPipelineViewportStateCreateFlags>;
757
  VULKAN_HPP_INLINE PipelineViewportStateCreateFlags operator|( PipelineViewportStateCreateFlagBits bit0, PipelineViewportStateCreateFlagBits bit1 )
  {
760 return PipelineViewportStateCreateFlags( bit0 ) | bit1;
761 }
762
763 enum class PipelineTessellationStateCreateFlagBits
764 {
765 };
766
767 using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits, VkPipelineTessellationStateCreateFlags>;
768
  VULKAN_HPP_INLINE PipelineTessellationStateCreateFlags operator|( PipelineTessellationStateCreateFlagBits bit0, PipelineTessellationStateCreateFlagBits bit1 )
  {
771 return PipelineTessellationStateCreateFlags( bit0 ) | bit1;
772 }
773
774 enum class PipelineInputAssemblyStateCreateFlagBits
775 {
776 };
777
778 using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits, VkPipelineInputAssemblyStateCreateFlags>;
779
  VULKAN_HPP_INLINE PipelineInputAssemblyStateCreateFlags operator|( PipelineInputAssemblyStateCreateFlagBits bit0, PipelineInputAssemblyStateCreateFlagBits bit1 )
  {
782 return PipelineInputAssemblyStateCreateFlags( bit0 ) | bit1;
783 }
784
785 enum class PipelineVertexInputStateCreateFlagBits
786 {
787 };
788
789 using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits, VkPipelineVertexInputStateCreateFlags>;
790
  VULKAN_HPP_INLINE PipelineVertexInputStateCreateFlags operator|( PipelineVertexInputStateCreateFlagBits bit0, PipelineVertexInputStateCreateFlagBits bit1 )
  {
793 return PipelineVertexInputStateCreateFlags( bit0 ) | bit1;
794 }
795
796 enum class PipelineShaderStageCreateFlagBits
797 {
798 };
799
800 using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits, VkPipelineShaderStageCreateFlags>;
801
  VULKAN_HPP_INLINE PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 )
  {
804 return PipelineShaderStageCreateFlags( bit0 ) | bit1;
805 }
806
807 enum class DescriptorSetLayoutCreateFlagBits
808 {
809 };
810
811 using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
812
  VULKAN_HPP_INLINE DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 )
  {
815 return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
816 }
817
818 enum class BufferViewCreateFlagBits
819 {
820 };
821
822 using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits, VkBufferViewCreateFlags>;
823
  VULKAN_HPP_INLINE BufferViewCreateFlags operator|( BufferViewCreateFlagBits bit0, BufferViewCreateFlagBits bit1 )
  {
826 return BufferViewCreateFlags( bit0 ) | bit1;
827 }
828
829 enum class InstanceCreateFlagBits
830 {
831 };
832
833 using InstanceCreateFlags = Flags<InstanceCreateFlagBits, VkInstanceCreateFlags>;
834
  VULKAN_HPP_INLINE InstanceCreateFlags operator|( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 )
  {
837 return InstanceCreateFlags( bit0 ) | bit1;
838 }
839
840 enum class DeviceCreateFlagBits
841 {
842 };
843
844 using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
845
  VULKAN_HPP_INLINE DeviceCreateFlags operator|( DeviceCreateFlagBits bit0, DeviceCreateFlagBits bit1 )
  {
848 return DeviceCreateFlags( bit0 ) | bit1;
849 }
850
851 enum class DeviceQueueCreateFlagBits
852 {
853 };
854
855 using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
856
  VULKAN_HPP_INLINE DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 )
  {
859 return DeviceQueueCreateFlags( bit0 ) | bit1;
860 }
861
862 enum class ImageViewCreateFlagBits
863 {
864 };
865
866 using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits, VkImageViewCreateFlags>;
867
  VULKAN_HPP_INLINE ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 )
  {
870 return ImageViewCreateFlags( bit0 ) | bit1;
871 }
872
873 enum class SemaphoreCreateFlagBits
874 {
875 };
876
877 using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits, VkSemaphoreCreateFlags>;
878
  VULKAN_HPP_INLINE SemaphoreCreateFlags operator|( SemaphoreCreateFlagBits bit0, SemaphoreCreateFlagBits bit1 )
  {
881 return SemaphoreCreateFlags( bit0 ) | bit1;
882 }
883
884 enum class ShaderModuleCreateFlagBits
885 {
886 };
887
888 using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits, VkShaderModuleCreateFlags>;
889
  VULKAN_HPP_INLINE ShaderModuleCreateFlags operator|( ShaderModuleCreateFlagBits bit0, ShaderModuleCreateFlagBits bit1 )
  {
892 return ShaderModuleCreateFlags( bit0 ) | bit1;
893 }
894
895 enum class EventCreateFlagBits
896 {
897 };
898
899 using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
900
  VULKAN_HPP_INLINE EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 )
  {
903 return EventCreateFlags( bit0 ) | bit1;
904 }
905
906 enum class MemoryMapFlagBits
907 {
908 };
909
910 using MemoryMapFlags = Flags<MemoryMapFlagBits, VkMemoryMapFlags>;
911
  VULKAN_HPP_INLINE MemoryMapFlags operator|( MemoryMapFlagBits bit0, MemoryMapFlagBits bit1 )
  {
914 return MemoryMapFlags( bit0 ) | bit1;
915 }
916
917 enum class SubpassDescriptionFlagBits
918 {
919 };
920
921 using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits, VkSubpassDescriptionFlags>;
922
  VULKAN_HPP_INLINE SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 )
  {
925 return SubpassDescriptionFlags( bit0 ) | bit1;
926 }
927
928 enum class DescriptorPoolResetFlagBits
929 {
930 };
931
932 using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
933
  VULKAN_HPP_INLINE DescriptorPoolResetFlags operator|( DescriptorPoolResetFlagBits bit0, DescriptorPoolResetFlagBits bit1 )
  {
936 return DescriptorPoolResetFlags( bit0 ) | bit1;
937 }
938
939 enum class SwapchainCreateFlagBitsKHR
940 {
941 };
942
943 using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR, VkSwapchainCreateFlagsKHR>;
944
  VULKAN_HPP_INLINE SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 )
  {
947 return SwapchainCreateFlagsKHR( bit0 ) | bit1;
948 }
949
950 enum class DisplayModeCreateFlagBitsKHR
951 {
952 };
953
954 using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
955
  VULKAN_HPP_INLINE DisplayModeCreateFlagsKHR operator|( DisplayModeCreateFlagBitsKHR bit0, DisplayModeCreateFlagBitsKHR bit1 )
  {
958 return DisplayModeCreateFlagsKHR( bit0 ) | bit1;
959 }
960
961 enum class DisplaySurfaceCreateFlagBitsKHR
962 {
963 };
964
965 using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
966
  VULKAN_HPP_INLINE DisplaySurfaceCreateFlagsKHR operator|( DisplaySurfaceCreateFlagBitsKHR bit0, DisplaySurfaceCreateFlagBitsKHR bit1 )
  {
969 return DisplaySurfaceCreateFlagsKHR( bit0 ) | bit1;
970 }
971
972#ifdef VK_USE_PLATFORM_ANDROID_KHR
973 enum class AndroidSurfaceCreateFlagBitsKHR
974 {
975 };
976#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
977
978#ifdef VK_USE_PLATFORM_ANDROID_KHR
979 using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR, VkAndroidSurfaceCreateFlagsKHR>;
980
  VULKAN_HPP_INLINE AndroidSurfaceCreateFlagsKHR operator|( AndroidSurfaceCreateFlagBitsKHR bit0, AndroidSurfaceCreateFlagBitsKHR bit1 )
  {
983 return AndroidSurfaceCreateFlagsKHR( bit0 ) | bit1;
984 }
985#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
986
987#ifdef VK_USE_PLATFORM_MIR_KHR
988 enum class MirSurfaceCreateFlagBitsKHR
989 {
990 };
991#endif /*VK_USE_PLATFORM_MIR_KHR*/
992
993#ifdef VK_USE_PLATFORM_MIR_KHR
994 using MirSurfaceCreateFlagsKHR = Flags<MirSurfaceCreateFlagBitsKHR, VkMirSurfaceCreateFlagsKHR>;
995
  VULKAN_HPP_INLINE MirSurfaceCreateFlagsKHR operator|( MirSurfaceCreateFlagBitsKHR bit0, MirSurfaceCreateFlagBitsKHR bit1 )
  {
998 return MirSurfaceCreateFlagsKHR( bit0 ) | bit1;
999 }
1000#endif /*VK_USE_PLATFORM_MIR_KHR*/
1001
#ifdef VK_USE_PLATFORM_VI_NN
1003 enum class ViSurfaceCreateFlagBitsNN
1004 {
1005 };
1006#endif /*VK_USE_PLATFORM_VI_NN*/
1007
1008#ifdef VK_USE_PLATFORM_VI_NN
1009 using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN, VkViSurfaceCreateFlagsNN>;
1010
1011 VULKAN_HPP_INLINE ViSurfaceCreateFlagsNN operator|( ViSurfaceCreateFlagBitsNN bit0, ViSurfaceCreateFlagBitsNN bit1 )
1012 {
1013 return ViSurfaceCreateFlagsNN( bit0 ) | bit1;
1014 }
1015#endif /*VK_USE_PLATFORM_VI_NN*/
1016
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
1018 enum class WaylandSurfaceCreateFlagBitsKHR
1019 {
1020 };
1021#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
1022
1023#ifdef VK_USE_PLATFORM_WAYLAND_KHR
1024 using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR, VkWaylandSurfaceCreateFlagsKHR>;
1025
  VULKAN_HPP_INLINE WaylandSurfaceCreateFlagsKHR operator|( WaylandSurfaceCreateFlagBitsKHR bit0, WaylandSurfaceCreateFlagBitsKHR bit1 )
  {
1028 return WaylandSurfaceCreateFlagsKHR( bit0 ) | bit1;
1029 }
1030#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
1031
1032#ifdef VK_USE_PLATFORM_WIN32_KHR
1033 enum class Win32SurfaceCreateFlagBitsKHR
1034 {
1035 };
1036#endif /*VK_USE_PLATFORM_WIN32_KHR*/
1037
1038#ifdef VK_USE_PLATFORM_WIN32_KHR
1039 using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR, VkWin32SurfaceCreateFlagsKHR>;
1040
  VULKAN_HPP_INLINE Win32SurfaceCreateFlagsKHR operator|( Win32SurfaceCreateFlagBitsKHR bit0, Win32SurfaceCreateFlagBitsKHR bit1 )
  {
1043 return Win32SurfaceCreateFlagsKHR( bit0 ) | bit1;
1044 }
1045#endif /*VK_USE_PLATFORM_WIN32_KHR*/
1046
1047#ifdef VK_USE_PLATFORM_XLIB_KHR
1048 enum class XlibSurfaceCreateFlagBitsKHR
1049 {
1050 };
1051#endif /*VK_USE_PLATFORM_XLIB_KHR*/
1052
1053#ifdef VK_USE_PLATFORM_XLIB_KHR
1054 using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR, VkXlibSurfaceCreateFlagsKHR>;
1055
  VULKAN_HPP_INLINE XlibSurfaceCreateFlagsKHR operator|( XlibSurfaceCreateFlagBitsKHR bit0, XlibSurfaceCreateFlagBitsKHR bit1 )
  {
1058 return XlibSurfaceCreateFlagsKHR( bit0 ) | bit1;
1059 }
1060#endif /*VK_USE_PLATFORM_XLIB_KHR*/
1061
1062#ifdef VK_USE_PLATFORM_XCB_KHR
1063 enum class XcbSurfaceCreateFlagBitsKHR
1064 {
1065 };
1066#endif /*VK_USE_PLATFORM_XCB_KHR*/
1067
1068#ifdef VK_USE_PLATFORM_XCB_KHR
1069 using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR, VkXcbSurfaceCreateFlagsKHR>;
1070
  VULKAN_HPP_INLINE XcbSurfaceCreateFlagsKHR operator|( XcbSurfaceCreateFlagBitsKHR bit0, XcbSurfaceCreateFlagBitsKHR bit1 )
  {
1073 return XcbSurfaceCreateFlagsKHR( bit0 ) | bit1;
1074 }
1075#endif /*VK_USE_PLATFORM_XCB_KHR*/
1076
  enum class CommandPoolTrimFlagBitsKHR
1078 {
1079 };
1080
1081 using CommandPoolTrimFlagsKHR = Flags<CommandPoolTrimFlagBitsKHR, VkCommandPoolTrimFlagsKHR>;
1082
1083 VULKAN_HPP_INLINE CommandPoolTrimFlagsKHR operator|( CommandPoolTrimFlagBitsKHR bit0, CommandPoolTrimFlagBitsKHR bit1 )
1084 {
1085 return CommandPoolTrimFlagsKHR( bit0 ) | bit1;
1086 }
1087
  class DeviceMemory
1089 {
1090 public:
1091 DeviceMemory()
1092 : m_deviceMemory(VK_NULL_HANDLE)
1093 {}
1094
    DeviceMemory( std::nullptr_t )
      : m_deviceMemory(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1100 DeviceMemory(VkDeviceMemory deviceMemory)
1101 : m_deviceMemory(deviceMemory)
1102 {}
1103
1104 DeviceMemory& operator=(VkDeviceMemory deviceMemory)
1105 {
1106 m_deviceMemory = deviceMemory;
1107 return *this;
1108 }
1109#endif
1110
    DeviceMemory& operator=( std::nullptr_t )
    {
      m_deviceMemory = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(DeviceMemory const &rhs) const
1118 {
1119 return m_deviceMemory == rhs.m_deviceMemory;
1120 }
1121
1122 bool operator!=(DeviceMemory const &rhs) const
1123 {
1124 return m_deviceMemory != rhs.m_deviceMemory;
1125 }
1126
1127 bool operator<(DeviceMemory const &rhs) const
1128 {
1129 return m_deviceMemory < rhs.m_deviceMemory;
1130 }
1131
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1133 explicit
1134#endif
1135 operator VkDeviceMemory() const
1136 {
1137 return m_deviceMemory;
1138 }
1139
1140 explicit operator bool() const
1141 {
1142 return m_deviceMemory != VK_NULL_HANDLE;
1143 }
1144
1145 bool operator!() const
1146 {
1147 return m_deviceMemory == VK_NULL_HANDLE;
1148 }
1149
1150 private:
1151 VkDeviceMemory m_deviceMemory;
1152 };
1153 static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
1154
1155 class CommandPool
1156 {
1157 public:
1158 CommandPool()
1159 : m_commandPool(VK_NULL_HANDLE)
1160 {}
1161
    CommandPool( std::nullptr_t )
      : m_commandPool(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1167 CommandPool(VkCommandPool commandPool)
1168 : m_commandPool(commandPool)
1169 {}
1170
1171 CommandPool& operator=(VkCommandPool commandPool)
1172 {
1173 m_commandPool = commandPool;
1174 return *this;
1175 }
1176#endif
1177
    CommandPool& operator=( std::nullptr_t )
    {
      m_commandPool = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(CommandPool const &rhs) const
1185 {
1186 return m_commandPool == rhs.m_commandPool;
1187 }
1188
1189 bool operator!=(CommandPool const &rhs) const
1190 {
1191 return m_commandPool != rhs.m_commandPool;
1192 }
1193
1194 bool operator<(CommandPool const &rhs) const
1195 {
1196 return m_commandPool < rhs.m_commandPool;
1197 }
1198
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1200 explicit
1201#endif
1202 operator VkCommandPool() const
1203 {
1204 return m_commandPool;
1205 }
1206
1207 explicit operator bool() const
1208 {
1209 return m_commandPool != VK_NULL_HANDLE;
1210 }
1211
1212 bool operator!() const
1213 {
1214 return m_commandPool == VK_NULL_HANDLE;
1215 }
1216
1217 private:
1218 VkCommandPool m_commandPool;
1219 };
1220 static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
1221
1222 class Buffer
1223 {
1224 public:
1225 Buffer()
1226 : m_buffer(VK_NULL_HANDLE)
1227 {}
1228
    Buffer( std::nullptr_t )
      : m_buffer(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1234 Buffer(VkBuffer buffer)
1235 : m_buffer(buffer)
1236 {}
1237
1238 Buffer& operator=(VkBuffer buffer)
1239 {
1240 m_buffer = buffer;
1241 return *this;
1242 }
1243#endif
1244
    Buffer& operator=( std::nullptr_t )
    {
      m_buffer = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(Buffer const &rhs) const
1252 {
1253 return m_buffer == rhs.m_buffer;
1254 }
1255
1256 bool operator!=(Buffer const &rhs) const
1257 {
1258 return m_buffer != rhs.m_buffer;
1259 }
1260
1261 bool operator<(Buffer const &rhs) const
1262 {
1263 return m_buffer < rhs.m_buffer;
1264 }
1265
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1267 explicit
1268#endif
1269 operator VkBuffer() const
1270 {
1271 return m_buffer;
1272 }
1273
1274 explicit operator bool() const
1275 {
1276 return m_buffer != VK_NULL_HANDLE;
1277 }
1278
1279 bool operator!() const
1280 {
1281 return m_buffer == VK_NULL_HANDLE;
1282 }
1283
1284 private:
1285 VkBuffer m_buffer;
1286 };
1287 static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
1288
1289 class BufferView
1290 {
1291 public:
1292 BufferView()
1293 : m_bufferView(VK_NULL_HANDLE)
1294 {}
1295
    BufferView( std::nullptr_t )
      : m_bufferView(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1301 BufferView(VkBufferView bufferView)
1302 : m_bufferView(bufferView)
1303 {}
1304
1305 BufferView& operator=(VkBufferView bufferView)
1306 {
1307 m_bufferView = bufferView;
1308 return *this;
1309 }
1310#endif
1311
    BufferView& operator=( std::nullptr_t )
    {
      m_bufferView = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(BufferView const &rhs) const
1319 {
1320 return m_bufferView == rhs.m_bufferView;
1321 }
1322
1323 bool operator!=(BufferView const &rhs) const
1324 {
1325 return m_bufferView != rhs.m_bufferView;
1326 }
1327
1328 bool operator<(BufferView const &rhs) const
1329 {
1330 return m_bufferView < rhs.m_bufferView;
1331 }
1332
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1334 explicit
1335#endif
1336 operator VkBufferView() const
1337 {
1338 return m_bufferView;
1339 }
1340
1341 explicit operator bool() const
1342 {
1343 return m_bufferView != VK_NULL_HANDLE;
1344 }
1345
1346 bool operator!() const
1347 {
1348 return m_bufferView == VK_NULL_HANDLE;
1349 }
1350
1351 private:
1352 VkBufferView m_bufferView;
1353 };
1354 static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
1355
1356 class Image
1357 {
1358 public:
1359 Image()
1360 : m_image(VK_NULL_HANDLE)
1361 {}
1362
    Image( std::nullptr_t )
      : m_image(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1368 Image(VkImage image)
1369 : m_image(image)
1370 {}
1371
1372 Image& operator=(VkImage image)
1373 {
1374 m_image = image;
1375 return *this;
1376 }
1377#endif
1378
    Image& operator=( std::nullptr_t )
    {
      m_image = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(Image const &rhs) const
1386 {
1387 return m_image == rhs.m_image;
1388 }
1389
1390 bool operator!=(Image const &rhs) const
1391 {
1392 return m_image != rhs.m_image;
1393 }
1394
1395 bool operator<(Image const &rhs) const
1396 {
1397 return m_image < rhs.m_image;
1398 }
1399
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1401 explicit
1402#endif
1403 operator VkImage() const
1404 {
1405 return m_image;
1406 }
1407
1408 explicit operator bool() const
1409 {
1410 return m_image != VK_NULL_HANDLE;
1411 }
1412
1413 bool operator!() const
1414 {
1415 return m_image == VK_NULL_HANDLE;
1416 }
1417
1418 private:
1419 VkImage m_image;
1420 };
1421 static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
1422
1423 class ImageView
1424 {
1425 public:
1426 ImageView()
1427 : m_imageView(VK_NULL_HANDLE)
1428 {}
1429
    ImageView( std::nullptr_t )
      : m_imageView(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1435 ImageView(VkImageView imageView)
1436 : m_imageView(imageView)
1437 {}
1438
1439 ImageView& operator=(VkImageView imageView)
1440 {
1441 m_imageView = imageView;
1442 return *this;
1443 }
1444#endif
1445
    ImageView& operator=( std::nullptr_t )
    {
      m_imageView = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(ImageView const &rhs) const
1453 {
1454 return m_imageView == rhs.m_imageView;
1455 }
1456
1457 bool operator!=(ImageView const &rhs) const
1458 {
1459 return m_imageView != rhs.m_imageView;
1460 }
1461
1462 bool operator<(ImageView const &rhs) const
1463 {
1464 return m_imageView < rhs.m_imageView;
1465 }
1466
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1468 explicit
1469#endif
1470 operator VkImageView() const
1471 {
1472 return m_imageView;
1473 }
1474
1475 explicit operator bool() const
1476 {
1477 return m_imageView != VK_NULL_HANDLE;
1478 }
1479
1480 bool operator!() const
1481 {
1482 return m_imageView == VK_NULL_HANDLE;
1483 }
1484
1485 private:
1486 VkImageView m_imageView;
1487 };
1488 static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
1489
1490 class ShaderModule
1491 {
1492 public:
1493 ShaderModule()
1494 : m_shaderModule(VK_NULL_HANDLE)
1495 {}
1496
    ShaderModule( std::nullptr_t )
      : m_shaderModule(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1502 ShaderModule(VkShaderModule shaderModule)
1503 : m_shaderModule(shaderModule)
1504 {}
1505
1506 ShaderModule& operator=(VkShaderModule shaderModule)
1507 {
1508 m_shaderModule = shaderModule;
1509 return *this;
1510 }
1511#endif
1512
    ShaderModule& operator=( std::nullptr_t )
    {
      m_shaderModule = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(ShaderModule const &rhs) const
1520 {
1521 return m_shaderModule == rhs.m_shaderModule;
1522 }
1523
1524 bool operator!=(ShaderModule const &rhs) const
1525 {
1526 return m_shaderModule != rhs.m_shaderModule;
1527 }
1528
1529 bool operator<(ShaderModule const &rhs) const
1530 {
1531 return m_shaderModule < rhs.m_shaderModule;
1532 }
1533
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1535 explicit
1536#endif
1537 operator VkShaderModule() const
1538 {
1539 return m_shaderModule;
1540 }
1541
1542 explicit operator bool() const
1543 {
1544 return m_shaderModule != VK_NULL_HANDLE;
1545 }
1546
1547 bool operator!() const
1548 {
1549 return m_shaderModule == VK_NULL_HANDLE;
1550 }
1551
1552 private:
1553 VkShaderModule m_shaderModule;
1554 };
1555 static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
1556
1557 class Pipeline
1558 {
1559 public:
1560 Pipeline()
1561 : m_pipeline(VK_NULL_HANDLE)
1562 {}
1563
    Pipeline( std::nullptr_t )
      : m_pipeline(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1569 Pipeline(VkPipeline pipeline)
1570 : m_pipeline(pipeline)
1571 {}
1572
1573 Pipeline& operator=(VkPipeline pipeline)
1574 {
1575 m_pipeline = pipeline;
1576 return *this;
1577 }
1578#endif
1579
    Pipeline& operator=( std::nullptr_t )
    {
      m_pipeline = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(Pipeline const &rhs) const
1587 {
1588 return m_pipeline == rhs.m_pipeline;
1589 }
1590
1591 bool operator!=(Pipeline const &rhs) const
1592 {
1593 return m_pipeline != rhs.m_pipeline;
1594 }
1595
1596 bool operator<(Pipeline const &rhs) const
1597 {
1598 return m_pipeline < rhs.m_pipeline;
1599 }
1600
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1602 explicit
1603#endif
1604 operator VkPipeline() const
1605 {
1606 return m_pipeline;
1607 }
1608
1609 explicit operator bool() const
1610 {
1611 return m_pipeline != VK_NULL_HANDLE;
1612 }
1613
1614 bool operator!() const
1615 {
1616 return m_pipeline == VK_NULL_HANDLE;
1617 }
1618
1619 private:
1620 VkPipeline m_pipeline;
1621 };
1622 static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
1623
1624 class PipelineLayout
1625 {
1626 public:
1627 PipelineLayout()
1628 : m_pipelineLayout(VK_NULL_HANDLE)
1629 {}
1630
    PipelineLayout( std::nullptr_t )
      : m_pipelineLayout(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1636 PipelineLayout(VkPipelineLayout pipelineLayout)
1637 : m_pipelineLayout(pipelineLayout)
1638 {}
1639
1640 PipelineLayout& operator=(VkPipelineLayout pipelineLayout)
1641 {
1642 m_pipelineLayout = pipelineLayout;
1643 return *this;
1644 }
1645#endif
1646
    PipelineLayout& operator=( std::nullptr_t )
    {
      m_pipelineLayout = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(PipelineLayout const &rhs) const
1654 {
1655 return m_pipelineLayout == rhs.m_pipelineLayout;
1656 }
1657
1658 bool operator!=(PipelineLayout const &rhs) const
1659 {
1660 return m_pipelineLayout != rhs.m_pipelineLayout;
1661 }
1662
1663 bool operator<(PipelineLayout const &rhs) const
1664 {
1665 return m_pipelineLayout < rhs.m_pipelineLayout;
1666 }
1667
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1669 explicit
1670#endif
1671 operator VkPipelineLayout() const
1672 {
1673 return m_pipelineLayout;
1674 }
1675
1676 explicit operator bool() const
1677 {
1678 return m_pipelineLayout != VK_NULL_HANDLE;
1679 }
1680
1681 bool operator!() const
1682 {
1683 return m_pipelineLayout == VK_NULL_HANDLE;
1684 }
1685
1686 private:
1687 VkPipelineLayout m_pipelineLayout;
1688 };
1689 static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
1690
1691 class Sampler
1692 {
1693 public:
1694 Sampler()
1695 : m_sampler(VK_NULL_HANDLE)
1696 {}
1697
    Sampler( std::nullptr_t )
      : m_sampler(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1703 Sampler(VkSampler sampler)
1704 : m_sampler(sampler)
1705 {}
1706
1707 Sampler& operator=(VkSampler sampler)
1708 {
1709 m_sampler = sampler;
1710 return *this;
1711 }
1712#endif
1713
    Sampler& operator=( std::nullptr_t )
    {
      m_sampler = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(Sampler const &rhs) const
1721 {
1722 return m_sampler == rhs.m_sampler;
1723 }
1724
1725 bool operator!=(Sampler const &rhs) const
1726 {
1727 return m_sampler != rhs.m_sampler;
1728 }
1729
1730 bool operator<(Sampler const &rhs) const
1731 {
1732 return m_sampler < rhs.m_sampler;
1733 }
1734
#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1736 explicit
1737#endif
1738 operator VkSampler() const
1739 {
1740 return m_sampler;
1741 }
1742
1743 explicit operator bool() const
1744 {
1745 return m_sampler != VK_NULL_HANDLE;
1746 }
1747
1748 bool operator!() const
1749 {
1750 return m_sampler == VK_NULL_HANDLE;
1751 }
1752
1753 private:
1754 VkSampler m_sampler;
1755 };
1756 static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
1757
1758 class DescriptorSet
1759 {
1760 public:
1761 DescriptorSet()
1762 : m_descriptorSet(VK_NULL_HANDLE)
1763 {}
1764
    DescriptorSet( std::nullptr_t )
      : m_descriptorSet(VK_NULL_HANDLE)
    {}

#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1770 DescriptorSet(VkDescriptorSet descriptorSet)
1771 : m_descriptorSet(descriptorSet)
1772 {}
1773
1774 DescriptorSet& operator=(VkDescriptorSet descriptorSet)
1775 {
1776 m_descriptorSet = descriptorSet;
1777 return *this;
1778 }
1779#endif
1780
    DescriptorSet& operator=( std::nullptr_t )
    {
      m_descriptorSet = VK_NULL_HANDLE;
      return *this;
    }

    bool operator==(DescriptorSet const &rhs) const
1788 {
1789 return m_descriptorSet == rhs.m_descriptorSet;
1790 }
1791
1792 bool operator!=(DescriptorSet const &rhs) const
1793 {
1794 return m_descriptorSet != rhs.m_descriptorSet;
1795 }
1796
1797 bool operator<(DescriptorSet const &rhs) const
1798 {
1799 return m_descriptorSet < rhs.m_descriptorSet;
1800 }
1801
1802#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1803 explicit
1804#endif
1805 operator VkDescriptorSet() const
1806 {
1807 return m_descriptorSet;
1808 }
1809
1810 explicit operator bool() const
1811 {
1812 return m_descriptorSet != VK_NULL_HANDLE;
1813 }
1814
1815 bool operator!() const
1816 {
1817 return m_descriptorSet == VK_NULL_HANDLE;
1818 }
1819
1820 private:
1821 VkDescriptorSet m_descriptorSet;
1822 };
1823 static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
1824
1825 class DescriptorSetLayout
1826 {
1827 public:
1828 DescriptorSetLayout()
1829 : m_descriptorSetLayout(VK_NULL_HANDLE)
1830 {}
1831
1832 DescriptorSetLayout( std::nullptr_t )
1833 : m_descriptorSetLayout(VK_NULL_HANDLE)
1834 {}
1835
1836#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1837 DescriptorSetLayout(VkDescriptorSetLayout descriptorSetLayout)
1838 : m_descriptorSetLayout(descriptorSetLayout)
1839 {}
1840
1841 DescriptorSetLayout& operator=(VkDescriptorSetLayout descriptorSetLayout)
1842 {
1843 m_descriptorSetLayout = descriptorSetLayout;
1844 return *this;
1845 }
1846#endif
1847
1848 DescriptorSetLayout& operator=( std::nullptr_t )
1849 {
1850 m_descriptorSetLayout = VK_NULL_HANDLE;
1851 return *this;
1852 }
1853
1854 bool operator==(DescriptorSetLayout const &rhs) const
1855 {
1856 return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
1857 }
1858
1859 bool operator!=(DescriptorSetLayout const &rhs) const
1860 {
1861 return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
1862 }
1863
1864 bool operator<(DescriptorSetLayout const &rhs) const
1865 {
1866 return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
1867 }
1868
1869#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1870 explicit
1871#endif
1872 operator VkDescriptorSetLayout() const
1873 {
1874 return m_descriptorSetLayout;
1875 }
1876
1877 explicit operator bool() const
1878 {
1879 return m_descriptorSetLayout != VK_NULL_HANDLE;
1880 }
1881
1882 bool operator!() const
1883 {
1884 return m_descriptorSetLayout == VK_NULL_HANDLE;
1885 }
1886
1887 private:
1888 VkDescriptorSetLayout m_descriptorSetLayout;
1889 };
1890 static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
1891
1892 class DescriptorPool
1893 {
1894 public:
1895 DescriptorPool()
1896 : m_descriptorPool(VK_NULL_HANDLE)
1897 {}
1898
1899 DescriptorPool( std::nullptr_t )
1900 : m_descriptorPool(VK_NULL_HANDLE)
1901 {}
1902
1903#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1904 DescriptorPool(VkDescriptorPool descriptorPool)
1905 : m_descriptorPool(descriptorPool)
1906 {}
1907
1908 DescriptorPool& operator=(VkDescriptorPool descriptorPool)
1909 {
1910 m_descriptorPool = descriptorPool;
1911 return *this;
1912 }
1913#endif
1914
1915 DescriptorPool& operator=( std::nullptr_t )
1916 {
1917 m_descriptorPool = VK_NULL_HANDLE;
1918 return *this;
1919 }
1920
1921 bool operator==(DescriptorPool const &rhs) const
1922 {
1923 return m_descriptorPool == rhs.m_descriptorPool;
1924 }
1925
1926 bool operator!=(DescriptorPool const &rhs) const
1927 {
1928 return m_descriptorPool != rhs.m_descriptorPool;
1929 }
1930
1931 bool operator<(DescriptorPool const &rhs) const
1932 {
1933 return m_descriptorPool < rhs.m_descriptorPool;
1934 }
1935
1936#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1937 explicit
1938#endif
1939 operator VkDescriptorPool() const
1940 {
1941 return m_descriptorPool;
1942 }
1943
1944 explicit operator bool() const
1945 {
1946 return m_descriptorPool != VK_NULL_HANDLE;
1947 }
1948
1949 bool operator!() const
1950 {
1951 return m_descriptorPool == VK_NULL_HANDLE;
1952 }
1953
1954 private:
1955 VkDescriptorPool m_descriptorPool;
1956 };
1957 static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
1958
1959 class Fence
1960 {
1961 public:
1962 Fence()
1963 : m_fence(VK_NULL_HANDLE)
1964 {}
1965
1966 Fence( std::nullptr_t )
1967 : m_fence(VK_NULL_HANDLE)
1968 {}
1969
1970#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
1971 Fence(VkFence fence)
1972 : m_fence(fence)
1973 {}
1974
1975 Fence& operator=(VkFence fence)
1976 {
1977 m_fence = fence;
1978 return *this;
1979 }
1980#endif
1981
1982 Fence& operator=( std::nullptr_t )
1983 {
1984 m_fence = VK_NULL_HANDLE;
1985 return *this;
1986 }
1987
1988 bool operator==(Fence const &rhs) const
1989 {
1990 return m_fence == rhs.m_fence;
1991 }
1992
1993 bool operator!=(Fence const &rhs) const
1994 {
1995 return m_fence != rhs.m_fence;
1996 }
1997
1998 bool operator<(Fence const &rhs) const
1999 {
2000 return m_fence < rhs.m_fence;
2001 }
2002
2003#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2004 explicit
2005#endif
2006 operator VkFence() const
2007 {
2008 return m_fence;
2009 }
2010
2011 explicit operator bool() const
2012 {
2013 return m_fence != VK_NULL_HANDLE;
2014 }
2015
2016 bool operator!() const
2017 {
2018 return m_fence == VK_NULL_HANDLE;
2019 }
2020
2021 private:
2022 VkFence m_fence;
2023 };
2024 static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
2025
2026 class Semaphore
2027 {
2028 public:
2029 Semaphore()
2030 : m_semaphore(VK_NULL_HANDLE)
2031 {}
2032
2033 Semaphore( std::nullptr_t )
2034 : m_semaphore(VK_NULL_HANDLE)
2035 {}
2036
2037#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2038 Semaphore(VkSemaphore semaphore)
2039 : m_semaphore(semaphore)
2040 {}
2041
2042 Semaphore& operator=(VkSemaphore semaphore)
2043 {
2044 m_semaphore = semaphore;
2045 return *this;
2046 }
2047#endif
2048
2049 Semaphore& operator=( std::nullptr_t )
2050 {
2051 m_semaphore = VK_NULL_HANDLE;
2052 return *this;
2053 }
2054
2055 bool operator==(Semaphore const &rhs) const
2056 {
2057 return m_semaphore == rhs.m_semaphore;
2058 }
2059
2060 bool operator!=(Semaphore const &rhs) const
2061 {
2062 return m_semaphore != rhs.m_semaphore;
2063 }
2064
2065 bool operator<(Semaphore const &rhs) const
2066 {
2067 return m_semaphore < rhs.m_semaphore;
2068 }
2069
2070#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2071 explicit
2072#endif
2073 operator VkSemaphore() const
2074 {
2075 return m_semaphore;
2076 }
2077
2078 explicit operator bool() const
2079 {
2080 return m_semaphore != VK_NULL_HANDLE;
2081 }
2082
2083 bool operator!() const
2084 {
2085 return m_semaphore == VK_NULL_HANDLE;
2086 }
2087
2088 private:
2089 VkSemaphore m_semaphore;
2090 };
2091 static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
2092
2093 class Event
2094 {
2095 public:
2096 Event()
2097 : m_event(VK_NULL_HANDLE)
2098 {}
2099
2100 Event( std::nullptr_t )
2101 : m_event(VK_NULL_HANDLE)
2102 {}
2103
2104#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2105 Event(VkEvent event)
2106 : m_event(event)
2107 {}
2108
2109 Event& operator=(VkEvent event)
2110 {
2111 m_event = event;
2112 return *this;
2113 }
2114#endif
2115
2116 Event& operator=( std::nullptr_t )
2117 {
2118 m_event = VK_NULL_HANDLE;
2119 return *this;
2120 }
2121
2122 bool operator==(Event const &rhs) const
2123 {
2124 return m_event == rhs.m_event;
2125 }
2126
2127 bool operator!=(Event const &rhs) const
2128 {
2129 return m_event != rhs.m_event;
2130 }
2131
2132 bool operator<(Event const &rhs) const
2133 {
2134 return m_event < rhs.m_event;
2135 }
2136
2137#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2138 explicit
2139#endif
2140 operator VkEvent() const
2141 {
2142 return m_event;
2143 }
2144
2145 explicit operator bool() const
2146 {
2147 return m_event != VK_NULL_HANDLE;
2148 }
2149
2150 bool operator!() const
2151 {
2152 return m_event == VK_NULL_HANDLE;
2153 }
2154
2155 private:
2156 VkEvent m_event;
2157 };
2158 static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
2159
2160 class QueryPool
2161 {
2162 public:
2163 QueryPool()
2164 : m_queryPool(VK_NULL_HANDLE)
2165 {}
2166
2167 QueryPool( std::nullptr_t )
2168 : m_queryPool(VK_NULL_HANDLE)
2169 {}
2170
2171#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2172 QueryPool(VkQueryPool queryPool)
2173 : m_queryPool(queryPool)
2174 {}
2175
2176 QueryPool& operator=(VkQueryPool queryPool)
2177 {
2178 m_queryPool = queryPool;
2179 return *this;
2180 }
2181#endif
2182
2183 QueryPool& operator=( std::nullptr_t )
2184 {
2185 m_queryPool = VK_NULL_HANDLE;
2186 return *this;
2187 }
2188
2189 bool operator==(QueryPool const &rhs) const
2190 {
2191 return m_queryPool == rhs.m_queryPool;
2192 }
2193
2194 bool operator!=(QueryPool const &rhs) const
2195 {
2196 return m_queryPool != rhs.m_queryPool;
2197 }
2198
2199 bool operator<(QueryPool const &rhs) const
2200 {
2201 return m_queryPool < rhs.m_queryPool;
2202 }
2203
2204#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2205 explicit
2206#endif
2207 operator VkQueryPool() const
2208 {
2209 return m_queryPool;
2210 }
2211
2212 explicit operator bool() const
2213 {
2214 return m_queryPool != VK_NULL_HANDLE;
2215 }
2216
2217 bool operator!() const
2218 {
2219 return m_queryPool == VK_NULL_HANDLE;
2220 }
2221
2222 private:
2223 VkQueryPool m_queryPool;
2224 };
2225 static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
2226
2227 class Framebuffer
2228 {
2229 public:
2230 Framebuffer()
2231 : m_framebuffer(VK_NULL_HANDLE)
2232 {}
2233
2234 Framebuffer( std::nullptr_t )
2235 : m_framebuffer(VK_NULL_HANDLE)
2236 {}
2237
2238#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2239 Framebuffer(VkFramebuffer framebuffer)
2240 : m_framebuffer(framebuffer)
2241 {}
2242
2243 Framebuffer& operator=(VkFramebuffer framebuffer)
2244 {
2245 m_framebuffer = framebuffer;
2246 return *this;
2247 }
2248#endif
2249
2250 Framebuffer& operator=( std::nullptr_t )
2251 {
2252 m_framebuffer = VK_NULL_HANDLE;
2253 return *this;
2254 }
2255
2256 bool operator==(Framebuffer const &rhs) const
2257 {
2258 return m_framebuffer == rhs.m_framebuffer;
2259 }
2260
2261 bool operator!=(Framebuffer const &rhs) const
2262 {
2263 return m_framebuffer != rhs.m_framebuffer;
2264 }
2265
2266 bool operator<(Framebuffer const &rhs) const
2267 {
2268 return m_framebuffer < rhs.m_framebuffer;
2269 }
2270
2271#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2272 explicit
2273#endif
2274 operator VkFramebuffer() const
2275 {
2276 return m_framebuffer;
2277 }
2278
2279 explicit operator bool() const
2280 {
2281 return m_framebuffer != VK_NULL_HANDLE;
2282 }
2283
2284 bool operator!() const
2285 {
2286 return m_framebuffer == VK_NULL_HANDLE;
2287 }
2288
2289 private:
2290 VkFramebuffer m_framebuffer;
2291 };
2292 static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
2293
2294 class RenderPass
2295 {
2296 public:
2297 RenderPass()
2298 : m_renderPass(VK_NULL_HANDLE)
2299 {}
2300
2301 RenderPass( std::nullptr_t )
2302 : m_renderPass(VK_NULL_HANDLE)
2303 {}
2304
2305#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2306 RenderPass(VkRenderPass renderPass)
2307 : m_renderPass(renderPass)
2308 {}
2309
2310 RenderPass& operator=(VkRenderPass renderPass)
2311 {
2312 m_renderPass = renderPass;
2313 return *this;
2314 }
2315#endif
2316
2317 RenderPass& operator=( std::nullptr_t )
2318 {
2319 m_renderPass = VK_NULL_HANDLE;
2320 return *this;
2321 }
2322
2323 bool operator==(RenderPass const &rhs) const
2324 {
2325 return m_renderPass == rhs.m_renderPass;
2326 }
2327
2328 bool operator!=(RenderPass const &rhs) const
2329 {
2330 return m_renderPass != rhs.m_renderPass;
2331 }
2332
2333 bool operator<(RenderPass const &rhs) const
2334 {
2335 return m_renderPass < rhs.m_renderPass;
2336 }
2337
2338#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2339 explicit
2340#endif
2341 operator VkRenderPass() const
2342 {
2343 return m_renderPass;
2344 }
2345
2346 explicit operator bool() const
2347 {
2348 return m_renderPass != VK_NULL_HANDLE;
2349 }
2350
2351 bool operator!() const
2352 {
2353 return m_renderPass == VK_NULL_HANDLE;
2354 }
2355
2356 private:
2357 VkRenderPass m_renderPass;
2358 };
2359 static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
2360
2361 class PipelineCache
2362 {
2363 public:
2364 PipelineCache()
2365 : m_pipelineCache(VK_NULL_HANDLE)
2366 {}
2367
2368 PipelineCache( std::nullptr_t )
2369 : m_pipelineCache(VK_NULL_HANDLE)
2370 {}
2371
2372#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2373 PipelineCache(VkPipelineCache pipelineCache)
2374 : m_pipelineCache(pipelineCache)
2375 {}
2376
2377 PipelineCache& operator=(VkPipelineCache pipelineCache)
2378 {
2379 m_pipelineCache = pipelineCache;
2380 return *this;
2381 }
2382#endif
2383
2384 PipelineCache& operator=( std::nullptr_t )
2385 {
2386 m_pipelineCache = VK_NULL_HANDLE;
2387 return *this;
2388 }
2389
2390 bool operator==(PipelineCache const &rhs) const
2391 {
2392 return m_pipelineCache == rhs.m_pipelineCache;
2393 }
2394
2395 bool operator!=(PipelineCache const &rhs) const
2396 {
2397 return m_pipelineCache != rhs.m_pipelineCache;
2398 }
2399
2400 bool operator<(PipelineCache const &rhs) const
2401 {
2402 return m_pipelineCache < rhs.m_pipelineCache;
2403 }
2404
2405#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2406 explicit
2407#endif
2408 operator VkPipelineCache() const
2409 {
2410 return m_pipelineCache;
2411 }
2412
2413 explicit operator bool() const
2414 {
2415 return m_pipelineCache != VK_NULL_HANDLE;
2416 }
2417
2418 bool operator!() const
2419 {
2420 return m_pipelineCache == VK_NULL_HANDLE;
2421 }
2422
2423 private:
2424 VkPipelineCache m_pipelineCache;
2425 };
2426 static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
2427
2428 class ObjectTableNVX
2429 {
2430 public:
2431 ObjectTableNVX()
2432 : m_objectTableNVX(VK_NULL_HANDLE)
2433 {}
2434
2435 ObjectTableNVX( std::nullptr_t )
2436 : m_objectTableNVX(VK_NULL_HANDLE)
2437 {}
2438
2439#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2440 ObjectTableNVX(VkObjectTableNVX objectTableNVX)
2441 : m_objectTableNVX(objectTableNVX)
2442 {}
2443
2444 ObjectTableNVX& operator=(VkObjectTableNVX objectTableNVX)
2445 {
2446 m_objectTableNVX = objectTableNVX;
2447 return *this;
2448 }
2449#endif
2450
2451 ObjectTableNVX& operator=( std::nullptr_t )
2452 {
2453 m_objectTableNVX = VK_NULL_HANDLE;
2454 return *this;
2455 }
2456
2457 bool operator==(ObjectTableNVX const &rhs) const
2458 {
2459 return m_objectTableNVX == rhs.m_objectTableNVX;
2460 }
2461
2462 bool operator!=(ObjectTableNVX const &rhs) const
2463 {
2464 return m_objectTableNVX != rhs.m_objectTableNVX;
2465 }
2466
2467 bool operator<(ObjectTableNVX const &rhs) const
2468 {
2469 return m_objectTableNVX < rhs.m_objectTableNVX;
2470 }
2471
2472#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2473 explicit
2474#endif
2475 operator VkObjectTableNVX() const
2476 {
2477 return m_objectTableNVX;
2478 }
2479
2480 explicit operator bool() const
2481 {
2482 return m_objectTableNVX != VK_NULL_HANDLE;
2483 }
2484
2485 bool operator!() const
2486 {
2487 return m_objectTableNVX == VK_NULL_HANDLE;
2488 }
2489
2490 private:
2491 VkObjectTableNVX m_objectTableNVX;
2492 };
2493 static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
2494
2495 class IndirectCommandsLayoutNVX
2496 {
2497 public:
2498 IndirectCommandsLayoutNVX()
2499 : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
2500 {}
2501
2502 IndirectCommandsLayoutNVX( std::nullptr_t )
2503 : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
2504 {}
2505
2506#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2507 IndirectCommandsLayoutNVX(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
2508 : m_indirectCommandsLayoutNVX(indirectCommandsLayoutNVX)
2509 {}
2510
2511 IndirectCommandsLayoutNVX& operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX)
2512 {
2513 m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
2514 return *this;
2515 }
2516#endif
2517
2518 IndirectCommandsLayoutNVX& operator=( std::nullptr_t )
2519 {
2520 m_indirectCommandsLayoutNVX = VK_NULL_HANDLE;
2521 return *this;
2522 }
2523
2524 bool operator==(IndirectCommandsLayoutNVX const &rhs) const
2525 {
2526 return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
2527 }
2528
2529 bool operator!=(IndirectCommandsLayoutNVX const &rhs) const
2530 {
2531 return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
2532 }
2533
2534 bool operator<(IndirectCommandsLayoutNVX const &rhs) const
2535 {
2536 return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
2537 }
2538
2539#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2540 explicit
2541#endif
2542 operator VkIndirectCommandsLayoutNVX() const
2543 {
2544 return m_indirectCommandsLayoutNVX;
2545 }
2546
2547 explicit operator bool() const
2548 {
2549 return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
2550 }
2551
2552 bool operator!() const
2553 {
2554 return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
2555 }
2556
2557 private:
2558 VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
2559 };
2560 static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
2561
2562 class DisplayKHR
2563 {
2564 public:
2565 DisplayKHR()
2566 : m_displayKHR(VK_NULL_HANDLE)
2567 {}
2568
2569 DisplayKHR( std::nullptr_t )
2570 : m_displayKHR(VK_NULL_HANDLE)
2571 {}
2572
2573#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2574 DisplayKHR(VkDisplayKHR displayKHR)
2575 : m_displayKHR(displayKHR)
2576 {}
2577
2578 DisplayKHR& operator=(VkDisplayKHR displayKHR)
2579 {
2580 m_displayKHR = displayKHR;
2581 return *this;
2582 }
2583#endif
2584
2585 DisplayKHR& operator=( std::nullptr_t )
2586 {
2587 m_displayKHR = VK_NULL_HANDLE;
2588 return *this;
2589 }
2590
2591 bool operator==(DisplayKHR const &rhs) const
2592 {
2593 return m_displayKHR == rhs.m_displayKHR;
2594 }
2595
2596 bool operator!=(DisplayKHR const &rhs) const
2597 {
2598 return m_displayKHR != rhs.m_displayKHR;
2599 }
2600
2601 bool operator<(DisplayKHR const &rhs) const
2602 {
2603 return m_displayKHR < rhs.m_displayKHR;
2604 }
2605
2606#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2607 explicit
2608#endif
2609 operator VkDisplayKHR() const
2610 {
2611 return m_displayKHR;
2612 }
2613
2614 explicit operator bool() const
2615 {
2616 return m_displayKHR != VK_NULL_HANDLE;
2617 }
2618
2619 bool operator!() const
2620 {
2621 return m_displayKHR == VK_NULL_HANDLE;
2622 }
2623
2624 private:
2625 VkDisplayKHR m_displayKHR;
2626 };
2627 static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
2628
2629 class DisplayModeKHR
2630 {
2631 public:
2632 DisplayModeKHR()
2633 : m_displayModeKHR(VK_NULL_HANDLE)
2634 {}
2635
2636 DisplayModeKHR( std::nullptr_t )
2637 : m_displayModeKHR(VK_NULL_HANDLE)
2638 {}
2639
2640#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2641 DisplayModeKHR(VkDisplayModeKHR displayModeKHR)
2642 : m_displayModeKHR(displayModeKHR)
2643 {}
2644
2645 DisplayModeKHR& operator=(VkDisplayModeKHR displayModeKHR)
2646 {
2647 m_displayModeKHR = displayModeKHR;
2648 return *this;
2649 }
2650#endif
2651
2652 DisplayModeKHR& operator=( std::nullptr_t )
2653 {
2654 m_displayModeKHR = VK_NULL_HANDLE;
2655 return *this;
2656 }
2657
2658 bool operator==(DisplayModeKHR const &rhs) const
2659 {
2660 return m_displayModeKHR == rhs.m_displayModeKHR;
2661 }
2662
2663 bool operator!=(DisplayModeKHR const &rhs) const
2664 {
2665 return m_displayModeKHR != rhs.m_displayModeKHR;
2666 }
2667
2668 bool operator<(DisplayModeKHR const &rhs) const
2669 {
2670 return m_displayModeKHR < rhs.m_displayModeKHR;
2671 }
2672
2673#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2674 explicit
2675#endif
2676 operator VkDisplayModeKHR() const
2677 {
2678 return m_displayModeKHR;
2679 }
2680
2681 explicit operator bool() const
2682 {
2683 return m_displayModeKHR != VK_NULL_HANDLE;
2684 }
2685
2686 bool operator!() const
2687 {
2688 return m_displayModeKHR == VK_NULL_HANDLE;
2689 }
2690
2691 private:
2692 VkDisplayModeKHR m_displayModeKHR;
2693 };
2694 static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
2695
2696 class SurfaceKHR
2697 {
2698 public:
2699 SurfaceKHR()
2700 : m_surfaceKHR(VK_NULL_HANDLE)
2701 {}
2702
2703 SurfaceKHR( std::nullptr_t )
2704 : m_surfaceKHR(VK_NULL_HANDLE)
2705 {}
2706
2707#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2708 SurfaceKHR(VkSurfaceKHR surfaceKHR)
2709 : m_surfaceKHR(surfaceKHR)
2710 {}
2711
2712 SurfaceKHR& operator=(VkSurfaceKHR surfaceKHR)
2713 {
2714 m_surfaceKHR = surfaceKHR;
2715 return *this;
2716 }
2717#endif
2718
2719 SurfaceKHR& operator=( std::nullptr_t )
2720 {
2721 m_surfaceKHR = VK_NULL_HANDLE;
2722 return *this;
2723 }
2724
2725 bool operator==(SurfaceKHR const &rhs) const
2726 {
2727 return m_surfaceKHR == rhs.m_surfaceKHR;
2728 }
2729
2730 bool operator!=(SurfaceKHR const &rhs) const
2731 {
2732 return m_surfaceKHR != rhs.m_surfaceKHR;
2733 }
2734
2735 bool operator<(SurfaceKHR const &rhs) const
2736 {
2737 return m_surfaceKHR < rhs.m_surfaceKHR;
2738 }
2739
2740#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2741 explicit
2742#endif
2743 operator VkSurfaceKHR() const
2744 {
2745 return m_surfaceKHR;
2746 }
2747
2748 explicit operator bool() const
2749 {
2750 return m_surfaceKHR != VK_NULL_HANDLE;
2751 }
2752
2753 bool operator!() const
2754 {
2755 return m_surfaceKHR == VK_NULL_HANDLE;
2756 }
2757
2758 private:
2759 VkSurfaceKHR m_surfaceKHR;
2760 };
2761 static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
2762
2763 class SwapchainKHR
2764 {
2765 public:
2766 SwapchainKHR()
2767 : m_swapchainKHR(VK_NULL_HANDLE)
2768 {}
2769
2770 SwapchainKHR( std::nullptr_t )
2771 : m_swapchainKHR(VK_NULL_HANDLE)
2772 {}
2773
2774#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2775 SwapchainKHR(VkSwapchainKHR swapchainKHR)
2776 : m_swapchainKHR(swapchainKHR)
2777 {}
2778
2779 SwapchainKHR& operator=(VkSwapchainKHR swapchainKHR)
2780 {
2781 m_swapchainKHR = swapchainKHR;
2782 return *this;
2783 }
2784#endif
2785
2786 SwapchainKHR& operator=( std::nullptr_t )
2787 {
2788 m_swapchainKHR = VK_NULL_HANDLE;
2789 return *this;
2790 }
2791
2792 bool operator==(SwapchainKHR const &rhs) const
2793 {
2794 return m_swapchainKHR == rhs.m_swapchainKHR;
2795 }
2796
2797 bool operator!=(SwapchainKHR const &rhs) const
2798 {
2799 return m_swapchainKHR != rhs.m_swapchainKHR;
2800 }
2801
2802 bool operator<(SwapchainKHR const &rhs) const
2803 {
2804 return m_swapchainKHR < rhs.m_swapchainKHR;
2805 }
2806
2807#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2808 explicit
2809#endif
2810 operator VkSwapchainKHR() const
2811 {
2812 return m_swapchainKHR;
2813 }
2814
2815 explicit operator bool() const
2816 {
2817 return m_swapchainKHR != VK_NULL_HANDLE;
2818 }
2819
2820 bool operator!() const
2821 {
2822 return m_swapchainKHR == VK_NULL_HANDLE;
2823 }
2824
2825 private:
2826 VkSwapchainKHR m_swapchainKHR;
2827 };
2828 static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
2829
2830 class DebugReportCallbackEXT
2831 {
2832 public:
2833 DebugReportCallbackEXT()
2834 : m_debugReportCallbackEXT(VK_NULL_HANDLE)
2835 {}
2836
2837 DebugReportCallbackEXT( std::nullptr_t )
2838 : m_debugReportCallbackEXT(VK_NULL_HANDLE)
2839 {}
2840
2841#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2842 DebugReportCallbackEXT(VkDebugReportCallbackEXT debugReportCallbackEXT)
2843 : m_debugReportCallbackEXT(debugReportCallbackEXT)
2844 {}
2845
2846 DebugReportCallbackEXT& operator=(VkDebugReportCallbackEXT debugReportCallbackEXT)
2847 {
2848 m_debugReportCallbackEXT = debugReportCallbackEXT;
2849 return *this;
2850 }
2851#endif
2852
2853 DebugReportCallbackEXT& operator=( std::nullptr_t )
2854 {
2855 m_debugReportCallbackEXT = VK_NULL_HANDLE;
2856 return *this;
2857 }
2858
2859 bool operator==(DebugReportCallbackEXT const &rhs) const
2860 {
2861 return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
2862 }
2863
2864 bool operator!=(DebugReportCallbackEXT const &rhs) const
2865 {
2866 return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
2867 }
2868
2869 bool operator<(DebugReportCallbackEXT const &rhs) const
2870 {
2871 return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
2872 }
2873
2874#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
2875 explicit
2876#endif
2877 operator VkDebugReportCallbackEXT() const
2878 {
2879 return m_debugReportCallbackEXT;
2880 }
2881
2882 explicit operator bool() const
2883 {
2884 return m_debugReportCallbackEXT != VK_NULL_HANDLE;
2885 }
2886
2887 bool operator!() const
2888 {
2889 return m_debugReportCallbackEXT == VK_NULL_HANDLE;
2890 }
2891
2892 private:
2893 VkDebugReportCallbackEXT m_debugReportCallbackEXT;
2894 };
2895 static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
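  // With VULKAN_HPP_TYPESAFE_CONVERSION defined, the handle wrappers above additionally
  // accept their C counterparts by implicit construction and assignment, and the conversion
  // operator back to the C handle is implicit as well, so wrapped and raw handles mix freely.
  // A minimal sketch, assuming the enclosing vk namespace:
  //
  // #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  //   VkFence rawFence = VK_NULL_HANDLE;   // stands in for a handle created through the C API
  //   vk::Fence fence = rawFence;          // implicit construction from the C handle
  //   rawFence = fence;                    // implicit conversion back
  // #endif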
2896
2897 struct Offset2D
2898 {
2899 Offset2D( int32_t x_ = 0, int32_t y_ = 0 )
2900 : x( x_ )
2901 , y( y_ )
2902 {
2903 }
2904
2905 Offset2D( VkOffset2D const & rhs )
2906 {
2907 memcpy( this, &rhs, sizeof(Offset2D) );
2908 }
2909
2910 Offset2D& operator=( VkOffset2D const & rhs )
2911 {
2912 memcpy( this, &rhs, sizeof(Offset2D) );
2913 return *this;
2914 }
2915
2916 Offset2D& setX( int32_t x_ )
2917 {
2918 x = x_;
2919 return *this;
2920 }
2921
2922 Offset2D& setY( int32_t y_ )
2923 {
2924 y = y_;
2925 return *this;
2926 }
2927
2928 operator const VkOffset2D&() const
2929 {
2930 return *reinterpret_cast<const VkOffset2D*>(this);
2931 }
2932
2933 bool operator==( Offset2D const& rhs ) const
2934 {
2935 return ( x == rhs.x )
2936 && ( y == rhs.y );
2937 }
2938
2939 bool operator!=( Offset2D const& rhs ) const
2940 {
2941 return !operator==( rhs );
2942 }
2943
2944 int32_t x;
2945 int32_t y;
2946 };
2947 static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
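  // The structs below mirror their Vk* counterparts bit for bit (the static_asserts enforce
  // this), so viewing a wrapper as the C struct is a zero-cost reinterpret_cast and the
  // set*() members return *this for fluent chaining. A minimal sketch, assuming the
  // enclosing vk namespace:
  //
  //   vk::Offset2D origin = vk::Offset2D().setX( 16 ).setY( 32 );
  //   const VkOffset2D & cOrigin = origin;   // view the wrapper as the C struct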
2948
2949 struct Offset3D
2950 {
2951 Offset3D( int32_t x_ = 0, int32_t y_ = 0, int32_t z_ = 0 )
2952 : x( x_ )
2953 , y( y_ )
2954 , z( z_ )
2955 {
2956 }
2957
2958 Offset3D( VkOffset3D const & rhs )
2959 {
2960 memcpy( this, &rhs, sizeof(Offset3D) );
2961 }
2962
2963 Offset3D& operator=( VkOffset3D const & rhs )
2964 {
2965 memcpy( this, &rhs, sizeof(Offset3D) );
2966 return *this;
2967 }
2968
2969 Offset3D& setX( int32_t x_ )
2970 {
2971 x = x_;
2972 return *this;
2973 }
2974
2975 Offset3D& setY( int32_t y_ )
2976 {
2977 y = y_;
2978 return *this;
2979 }
2980
2981 Offset3D& setZ( int32_t z_ )
2982 {
2983 z = z_;
2984 return *this;
2985 }
2986
2987 operator const VkOffset3D&() const
2988 {
2989 return *reinterpret_cast<const VkOffset3D*>(this);
2990 }
2991
2992 bool operator==( Offset3D const& rhs ) const
2993 {
2994 return ( x == rhs.x )
2995 && ( y == rhs.y )
2996 && ( z == rhs.z );
2997 }
2998
2999 bool operator!=( Offset3D const& rhs ) const
3000 {
3001 return !operator==( rhs );
3002 }
3003
3004 int32_t x;
3005 int32_t y;
3006 int32_t z;
3007 };
3008 static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
3009
3010 struct Extent2D
3011 {
3012 Extent2D( uint32_t width_ = 0, uint32_t height_ = 0 )
3013 : width( width_ )
3014 , height( height_ )
3015 {
3016 }
3017
3018 Extent2D( VkExtent2D const & rhs )
3019 {
3020 memcpy( this, &rhs, sizeof(Extent2D) );
3021 }
3022
3023 Extent2D& operator=( VkExtent2D const & rhs )
3024 {
3025 memcpy( this, &rhs, sizeof(Extent2D) );
3026 return *this;
3027 }
3028
3029 Extent2D& setWidth( uint32_t width_ )
3030 {
3031 width = width_;
3032 return *this;
3033 }
3034
3035 Extent2D& setHeight( uint32_t height_ )
3036 {
3037 height = height_;
3038 return *this;
3039 }
3040
3041 operator const VkExtent2D&() const
3042 {
3043 return *reinterpret_cast<const VkExtent2D*>(this);
3044 }
3045
3046 bool operator==( Extent2D const& rhs ) const
3047 {
3048 return ( width == rhs.width )
3049 && ( height == rhs.height );
3050 }
3051
3052 bool operator!=( Extent2D const& rhs ) const
3053 {
3054 return !operator==( rhs );
3055 }
3056
3057 uint32_t width;
3058 uint32_t height;
3059 };
3060 static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
3061
3062 struct Extent3D
3063 {
3064 Extent3D( uint32_t width_ = 0, uint32_t height_ = 0, uint32_t depth_ = 0 )
3065 : width( width_ )
3066 , height( height_ )
3067 , depth( depth_ )
3068 {
3069 }
3070
3071 Extent3D( VkExtent3D const & rhs )
3072 {
3073 memcpy( this, &rhs, sizeof(Extent3D) );
3074 }
3075
3076 Extent3D& operator=( VkExtent3D const & rhs )
3077 {
3078 memcpy( this, &rhs, sizeof(Extent3D) );
3079 return *this;
3080 }
3081
3082 Extent3D& setWidth( uint32_t width_ )
3083 {
3084 width = width_;
3085 return *this;
3086 }
3087
3088 Extent3D& setHeight( uint32_t height_ )
3089 {
3090 height = height_;
3091 return *this;
3092 }
3093
3094 Extent3D& setDepth( uint32_t depth_ )
3095 {
3096 depth = depth_;
3097 return *this;
3098 }
3099
3100 operator const VkExtent3D&() const
3101 {
3102 return *reinterpret_cast<const VkExtent3D*>(this);
3103 }
3104
3105 bool operator==( Extent3D const& rhs ) const
3106 {
3107 return ( width == rhs.width )
3108 && ( height == rhs.height )
3109 && ( depth == rhs.depth );
3110 }
3111
3112 bool operator!=( Extent3D const& rhs ) const
3113 {
3114 return !operator==( rhs );
3115 }
3116
3117 uint32_t width;
3118 uint32_t height;
3119 uint32_t depth;
3120 };
3121 static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
3122
3123 struct Viewport
3124 {
3125 Viewport( float x_ = 0, float y_ = 0, float width_ = 0, float height_ = 0, float minDepth_ = 0, float maxDepth_ = 0 )
3126 : x( x_ )
3127 , y( y_ )
3128 , width( width_ )
3129 , height( height_ )
3130 , minDepth( minDepth_ )
3131 , maxDepth( maxDepth_ )
3132 {
3133 }
3134
3135 Viewport( VkViewport const & rhs )
3136 {
3137 memcpy( this, &rhs, sizeof(Viewport) );
3138 }
3139
3140 Viewport& operator=( VkViewport const & rhs )
3141 {
3142 memcpy( this, &rhs, sizeof(Viewport) );
3143 return *this;
3144 }
3145
3146 Viewport& setX( float x_ )
3147 {
3148 x = x_;
3149 return *this;
3150 }
3151
3152 Viewport& setY( float y_ )
3153 {
3154 y = y_;
3155 return *this;
3156 }
3157
3158 Viewport& setWidth( float width_ )
3159 {
3160 width = width_;
3161 return *this;
3162 }
3163
3164 Viewport& setHeight( float height_ )
3165 {
3166 height = height_;
3167 return *this;
3168 }
3169
3170 Viewport& setMinDepth( float minDepth_ )
3171 {
3172 minDepth = minDepth_;
3173 return *this;
3174 }
3175
3176 Viewport& setMaxDepth( float maxDepth_ )
3177 {
3178 maxDepth = maxDepth_;
3179 return *this;
3180 }
3181
3182 operator const VkViewport&() const
3183 {
3184 return *reinterpret_cast<const VkViewport*>(this);
3185 }
3186
3187 bool operator==( Viewport const& rhs ) const
3188 {
3189 return ( x == rhs.x )
3190 && ( y == rhs.y )
3191 && ( width == rhs.width )
3192 && ( height == rhs.height )
3193 && ( minDepth == rhs.minDepth )
3194 && ( maxDepth == rhs.maxDepth );
3195 }
3196
3197 bool operator!=( Viewport const& rhs ) const
3198 {
3199 return !operator==( rhs );
3200 }
3201
3202 float x;
3203 float y;
3204 float width;
3205 float height;
3206 float minDepth;
3207 float maxDepth;
3208 };
3209 static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
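  // A minimal sketch of a full-window viewport with the conventional [0,1] depth range
  // (800x600 is an arbitrary example size); the constructor and the chained setters are
  // equivalent:
  //
  //   vk::Viewport viewport( 0.0f, 0.0f, 800.0f, 600.0f, 0.0f, 1.0f );
  //   vk::Viewport viewport2 = vk::Viewport().setWidth( 800.0f ).setHeight( 600.0f )
  //                                          .setMinDepth( 0.0f ).setMaxDepth( 1.0f );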
3210
3211 struct Rect2D
3212 {
3213 Rect2D( Offset2D offset_ = Offset2D(), Extent2D extent_ = Extent2D() )
3214 : offset( offset_ )
3215 , extent( extent_ )
3216 {
3217 }
3218
3219 Rect2D( VkRect2D const & rhs )
3220 {
3221 memcpy( this, &rhs, sizeof(Rect2D) );
3222 }
3223
3224 Rect2D& operator=( VkRect2D const & rhs )
3225 {
3226 memcpy( this, &rhs, sizeof(Rect2D) );
3227 return *this;
3228 }
3229
3230 Rect2D& setOffset( Offset2D offset_ )
3231 {
3232 offset = offset_;
3233 return *this;
3234 }
3235
3236 Rect2D& setExtent( Extent2D extent_ )
3237 {
3238 extent = extent_;
3239 return *this;
3240 }
3241
3242 operator const VkRect2D&() const
3243 {
3244 return *reinterpret_cast<const VkRect2D*>(this);
3245 }
3246
3247 bool operator==( Rect2D const& rhs ) const
3248 {
3249 return ( offset == rhs.offset )
3250 && ( extent == rhs.extent );
3251 }
3252
3253 bool operator!=( Rect2D const& rhs ) const
3254 {
3255 return !operator==( rhs );
3256 }
3257
3258 Offset2D offset;
3259 Extent2D extent;
3260 };
3261 static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
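  // Rect2D composes the Offset2D and Extent2D wrappers above; a typical scissor rectangle
  // covering an 800x600 area (arbitrary example values) looks like:
  //
  //   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 800, 600 ) );
  //   const VkRect2D & cScissor = scissor;   // pass straight through to C APIs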
3262
3263 struct ClearRect
3264 {
3265 ClearRect( Rect2D rect_ = Rect2D(), uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
3266 : rect( rect_ )
3267 , baseArrayLayer( baseArrayLayer_ )
3268 , layerCount( layerCount_ )
3269 {
3270 }
3271
3272 ClearRect( VkClearRect const & rhs )
3273 {
3274 memcpy( this, &rhs, sizeof(ClearRect) );
3275 }
3276
3277 ClearRect& operator=( VkClearRect const & rhs )
3278 {
3279 memcpy( this, &rhs, sizeof(ClearRect) );
3280 return *this;
3281 }
3282
3283 ClearRect& setRect( Rect2D rect_ )
3284 {
3285 rect = rect_;
3286 return *this;
3287 }
3288
3289 ClearRect& setBaseArrayLayer( uint32_t baseArrayLayer_ )
3290 {
3291 baseArrayLayer = baseArrayLayer_;
3292 return *this;
3293 }
3294
3295 ClearRect& setLayerCount( uint32_t layerCount_ )
3296 {
3297 layerCount = layerCount_;
3298 return *this;
3299 }
3300
3301 operator const VkClearRect&() const
3302 {
3303 return *reinterpret_cast<const VkClearRect*>(this);
3304 }
3305
3306 bool operator==( ClearRect const& rhs ) const
3307 {
3308 return ( rect == rhs.rect )
3309 && ( baseArrayLayer == rhs.baseArrayLayer )
3310 && ( layerCount == rhs.layerCount );
3311 }
3312
3313 bool operator!=( ClearRect const& rhs ) const
3314 {
3315 return !operator==( rhs );
3316 }
3317
3318 Rect2D rect;
3319 uint32_t baseArrayLayer;
3320 uint32_t layerCount;
3321 };
3322 static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
3323
3324 struct ExtensionProperties
3325 {
3326 operator const VkExtensionProperties&() const
3327 {
3328 return *reinterpret_cast<const VkExtensionProperties*>(this);
3329 }
3330
3331 bool operator==( ExtensionProperties const& rhs ) const
3332 {
3333 return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
3334 && ( specVersion == rhs.specVersion );
3335 }
3336
3337 bool operator!=( ExtensionProperties const& rhs ) const
3338 {
3339 return !operator==( rhs );
3340 }
3341
3342 char extensionName[VK_MAX_EXTENSION_NAME_SIZE];
3343 uint32_t specVersion;
3344 };
3345 static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
3346
3347 struct LayerProperties
3348 {
3349 operator const VkLayerProperties&() const
3350 {
3351 return *reinterpret_cast<const VkLayerProperties*>(this);
3352 }
3353
3354 bool operator==( LayerProperties const& rhs ) const
3355 {
3356 return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 )
3357 && ( specVersion == rhs.specVersion )
3358 && ( implementationVersion == rhs.implementationVersion )
3359 && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 );
3360 }
3361
3362 bool operator!=( LayerProperties const& rhs ) const
3363 {
3364 return !operator==( rhs );
3365 }
3366
3367 char layerName[VK_MAX_EXTENSION_NAME_SIZE];
3368 uint32_t specVersion;
3369 uint32_t implementationVersion;
3370 char description[VK_MAX_DESCRIPTION_SIZE];
3371 };
3372 static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
3373
3374 struct AllocationCallbacks
3375 {
3376 AllocationCallbacks( void* pUserData_ = nullptr, PFN_vkAllocationFunction pfnAllocation_ = nullptr, PFN_vkReallocationFunction pfnReallocation_ = nullptr, PFN_vkFreeFunction pfnFree_ = nullptr, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = nullptr, PFN_vkInternalFreeNotification pfnInternalFree_ = nullptr )
3377 : pUserData( pUserData_ )
3378 , pfnAllocation( pfnAllocation_ )
3379 , pfnReallocation( pfnReallocation_ )
3380 , pfnFree( pfnFree_ )
3381 , pfnInternalAllocation( pfnInternalAllocation_ )
3382 , pfnInternalFree( pfnInternalFree_ )
3383 {
3384 }
3385
3386 AllocationCallbacks( VkAllocationCallbacks const & rhs )
3387 {
3388 memcpy( this, &rhs, sizeof(AllocationCallbacks) );
3389 }
3390
3391 AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs )
3392 {
3393 memcpy( this, &rhs, sizeof(AllocationCallbacks) );
3394 return *this;
3395 }
3396
3397 AllocationCallbacks& setPUserData( void* pUserData_ )
3398 {
3399 pUserData = pUserData_;
3400 return *this;
3401 }
3402
3403 AllocationCallbacks& setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ )
3404 {
3405 pfnAllocation = pfnAllocation_;
3406 return *this;
3407 }
3408
3409 AllocationCallbacks& setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ )
3410 {
3411 pfnReallocation = pfnReallocation_;
3412 return *this;
3413 }
3414
3415 AllocationCallbacks& setPfnFree( PFN_vkFreeFunction pfnFree_ )
3416 {
3417 pfnFree = pfnFree_;
3418 return *this;
3419 }
3420
3421 AllocationCallbacks& setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ )
3422 {
3423 pfnInternalAllocation = pfnInternalAllocation_;
3424 return *this;
3425 }
3426
3427 AllocationCallbacks& setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ )
3428 {
3429 pfnInternalFree = pfnInternalFree_;
3430 return *this;
3431 }
3432
3433 operator const VkAllocationCallbacks&() const
3434 {
3435 return *reinterpret_cast<const VkAllocationCallbacks*>(this);
3436 }
3437
3438 bool operator==( AllocationCallbacks const& rhs ) const
3439 {
3440 return ( pUserData == rhs.pUserData )
3441 && ( pfnAllocation == rhs.pfnAllocation )
3442 && ( pfnReallocation == rhs.pfnReallocation )
3443 && ( pfnFree == rhs.pfnFree )
3444 && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
3445 && ( pfnInternalFree == rhs.pfnInternalFree );
3446 }
3447
3448 bool operator!=( AllocationCallbacks const& rhs ) const
3449 {
3450 return !operator==( rhs );
3451 }
3452
3453 void* pUserData;
3454 PFN_vkAllocationFunction pfnAllocation;
3455 PFN_vkReallocationFunction pfnReallocation;
3456 PFN_vkFreeFunction pfnFree;
3457 PFN_vkInternalAllocationNotification pfnInternalAllocation;
3458 PFN_vkInternalFreeNotification pfnInternalFree;
3459 };
3460 static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
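  // AllocationCallbacks bundles the PFN_vk* host-allocation callbacks. A minimal sketch,
  // where myAlloc, myRealloc and myFree are hypothetical functions matching the
  // PFN_vkAllocationFunction, PFN_vkReallocationFunction and PFN_vkFreeFunction signatures:
  //
  //   vk::AllocationCallbacks allocator = vk::AllocationCallbacks()
  //     .setPUserData( nullptr )
  //     .setPfnAllocation( &myAlloc )
  //     .setPfnReallocation( &myRealloc )
  //     .setPfnFree( &myFree );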
3461
3462 struct MemoryRequirements
3463 {
3464 operator const VkMemoryRequirements&() const
3465 {
3466 return *reinterpret_cast<const VkMemoryRequirements*>(this);
3467 }
3468
3469 bool operator==( MemoryRequirements const& rhs ) const
3470 {
3471 return ( size == rhs.size )
3472 && ( alignment == rhs.alignment )
3473 && ( memoryTypeBits == rhs.memoryTypeBits );
3474 }
3475
3476 bool operator!=( MemoryRequirements const& rhs ) const
3477 {
3478 return !operator==( rhs );
3479 }
3480
3481 DeviceSize size;
3482 DeviceSize alignment;
3483 uint32_t memoryTypeBits;
3484 };
3485 static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
3486
3487 struct DescriptorBufferInfo
3488 {
3489 DescriptorBufferInfo( Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize range_ = 0 )
3490 : buffer( buffer_ )
3491 , offset( offset_ )
3492 , range( range_ )
3493 {
3494 }
3495
3496 DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs )
3497 {
3498 memcpy( this, &rhs, sizeof(DescriptorBufferInfo) );
3499 }
3500
3501 DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs )
3502 {
3503 memcpy( this, &rhs, sizeof(DescriptorBufferInfo) );
3504 return *this;
3505 }
3506
3507 DescriptorBufferInfo& setBuffer( Buffer buffer_ )
3508 {
3509 buffer = buffer_;
3510 return *this;
3511 }
3512
3513 DescriptorBufferInfo& setOffset( DeviceSize offset_ )
3514 {
3515 offset = offset_;
3516 return *this;
3517 }
3518
3519 DescriptorBufferInfo& setRange( DeviceSize range_ )
3520 {
3521 range = range_;
3522 return *this;
3523 }
3524
3525 operator const VkDescriptorBufferInfo&() const
3526 {
3527 return *reinterpret_cast<const VkDescriptorBufferInfo*>(this);
3528 }
3529
3530 bool operator==( DescriptorBufferInfo const& rhs ) const
3531 {
3532 return ( buffer == rhs.buffer )
3533 && ( offset == rhs.offset )
3534 && ( range == rhs.range );
3535 }
3536
3537 bool operator!=( DescriptorBufferInfo const& rhs ) const
3538 {
3539 return !operator==( rhs );
3540 }
3541
3542 Buffer buffer;
3543 DeviceSize offset;
3544 DeviceSize range;
3545 };
3546 static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
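  // DescriptorBufferInfo names the buffer range a descriptor refers to. A minimal sketch,
  // where uniformBuffer is a hypothetical, previously created vk::Buffer bound at offset 0
  // for its whole size:
  //
  //   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );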
3547
3548 struct SubresourceLayout
3549 {
3550 operator const VkSubresourceLayout&() const
3551 {
3552 return *reinterpret_cast<const VkSubresourceLayout*>(this);
3553 }
3554
3555 bool operator==( SubresourceLayout const& rhs ) const
3556 {
3557 return ( offset == rhs.offset )
3558 && ( size == rhs.size )
3559 && ( rowPitch == rhs.rowPitch )
3560 && ( arrayPitch == rhs.arrayPitch )
3561 && ( depthPitch == rhs.depthPitch );
3562 }
3563
3564 bool operator!=( SubresourceLayout const& rhs ) const
3565 {
3566 return !operator==( rhs );
3567 }
3568
3569 DeviceSize offset;
3570 DeviceSize size;
3571 DeviceSize rowPitch;
3572 DeviceSize arrayPitch;
3573 DeviceSize depthPitch;
3574 };
3575 static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
3576
3577 struct BufferCopy
3578 {
3579 BufferCopy( DeviceSize srcOffset_ = 0, DeviceSize dstOffset_ = 0, DeviceSize size_ = 0 )
3580 : srcOffset( srcOffset_ )
3581 , dstOffset( dstOffset_ )
3582 , size( size_ )
3583 {
3584 }
3585
3586 BufferCopy( VkBufferCopy const & rhs )
3587 {
3588 memcpy( this, &rhs, sizeof(BufferCopy) );
3589 }
3590
3591 BufferCopy& operator=( VkBufferCopy const & rhs )
3592 {
3593 memcpy( this, &rhs, sizeof(BufferCopy) );
3594 return *this;
3595 }
3596
3597 BufferCopy& setSrcOffset( DeviceSize srcOffset_ )
3598 {
3599 srcOffset = srcOffset_;
3600 return *this;
3601 }
3602
3603 BufferCopy& setDstOffset( DeviceSize dstOffset_ )
3604 {
3605 dstOffset = dstOffset_;
3606 return *this;
3607 }
3608
3609 BufferCopy& setSize( DeviceSize size_ )
3610 {
3611 size = size_;
3612 return *this;
3613 }
3614
3615 operator const VkBufferCopy&() const
3616 {
3617 return *reinterpret_cast<const VkBufferCopy*>(this);
3618 }
3619
3620 bool operator==( BufferCopy const& rhs ) const
3621 {
3622 return ( srcOffset == rhs.srcOffset )
3623 && ( dstOffset == rhs.dstOffset )
3624 && ( size == rhs.size );
3625 }
3626
3627 bool operator!=( BufferCopy const& rhs ) const
3628 {
3629 return !operator==( rhs );
3630 }
3631
3632 DeviceSize srcOffset;
3633 DeviceSize dstOffset;
3634 DeviceSize size;
3635 };
3636 static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
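  // BufferCopy describes one source-offset / destination-offset / size region of a
  // buffer-to-buffer copy. For example, copying 256 bytes from the start of a staging
  // buffer to offset 1024 of the destination (arbitrary example values):
  //
  //   vk::BufferCopy region( 0, 1024, 256 );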
3637
3638 struct SpecializationMapEntry
3639 {
3640 SpecializationMapEntry( uint32_t constantID_ = 0, uint32_t offset_ = 0, size_t size_ = 0 )
3641 : constantID( constantID_ )
3642 , offset( offset_ )
3643 , size( size_ )
3644 {
3645 }
3646
3647 SpecializationMapEntry( VkSpecializationMapEntry const & rhs )
3648 {
3649 memcpy( this, &rhs, sizeof(SpecializationMapEntry) );
3650 }
3651
3652 SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs )
3653 {
3654 memcpy( this, &rhs, sizeof(SpecializationMapEntry) );
3655 return *this;
3656 }
3657
3658 SpecializationMapEntry& setConstantID( uint32_t constantID_ )
3659 {
3660 constantID = constantID_;
3661 return *this;
3662 }
3663
3664 SpecializationMapEntry& setOffset( uint32_t offset_ )
3665 {
3666 offset = offset_;
3667 return *this;
3668 }
3669
3670 SpecializationMapEntry& setSize( size_t size_ )
3671 {
3672 size = size_;
3673 return *this;
3674 }
3675
3676 operator const VkSpecializationMapEntry&() const
3677 {
3678 return *reinterpret_cast<const VkSpecializationMapEntry*>(this);
3679 }
3680
3681 bool operator==( SpecializationMapEntry const& rhs ) const
3682 {
3683 return ( constantID == rhs.constantID )
3684 && ( offset == rhs.offset )
3685 && ( size == rhs.size );
3686 }
3687
3688 bool operator!=( SpecializationMapEntry const& rhs ) const
3689 {
3690 return !operator==( rhs );
3691 }
3692
3693 uint32_t constantID;
3694 uint32_t offset;
3695 size_t size;
3696 };
3697 static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
3698
3699 struct SpecializationInfo
3700 {
3701 SpecializationInfo( uint32_t mapEntryCount_ = 0, const SpecializationMapEntry* pMapEntries_ = nullptr, size_t dataSize_ = 0, const void* pData_ = nullptr )
3702 : mapEntryCount( mapEntryCount_ )
3703 , pMapEntries( pMapEntries_ )
3704 , dataSize( dataSize_ )
3705 , pData( pData_ )
3706 {
3707 }
3708
3709 SpecializationInfo( VkSpecializationInfo const & rhs )
3710 {
3711 memcpy( this, &rhs, sizeof(SpecializationInfo) );
3712 }
3713
3714 SpecializationInfo& operator=( VkSpecializationInfo const & rhs )
3715 {
3716 memcpy( this, &rhs, sizeof(SpecializationInfo) );
3717 return *this;
3718 }
3719
3720 SpecializationInfo& setMapEntryCount( uint32_t mapEntryCount_ )
3721 {
3722 mapEntryCount = mapEntryCount_;
3723 return *this;
3724 }
3725
3726 SpecializationInfo& setPMapEntries( const SpecializationMapEntry* pMapEntries_ )
3727 {
3728 pMapEntries = pMapEntries_;
3729 return *this;
3730 }
3731
3732 SpecializationInfo& setDataSize( size_t dataSize_ )
3733 {
3734 dataSize = dataSize_;
3735 return *this;
3736 }
3737
3738 SpecializationInfo& setPData( const void* pData_ )
3739 {
3740 pData = pData_;
3741 return *this;
3742 }
3743
3744 operator const VkSpecializationInfo&() const
3745 {
3746 return *reinterpret_cast<const VkSpecializationInfo*>(this);
3747 }
3748
3749 bool operator==( SpecializationInfo const& rhs ) const
3750 {
3751 return ( mapEntryCount == rhs.mapEntryCount )
3752 && ( pMapEntries == rhs.pMapEntries )
3753 && ( dataSize == rhs.dataSize )
3754 && ( pData == rhs.pData );
3755 }
3756
3757 bool operator!=( SpecializationInfo const& rhs ) const
3758 {
3759 return !operator==( rhs );
3760 }
3761
3762 uint32_t mapEntryCount;
3763 const SpecializationMapEntry* pMapEntries;
3764 size_t dataSize;
3765 const void* pData;
3766 };
3767 static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
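  // SpecializationInfo points at an array of SpecializationMapEntry plus the raw constant
  // data. A minimal sketch feeding a single uint32_t specialization constant with
  // constantID 0 (the value 64 is an arbitrary example):
  //
  //   static const uint32_t workGroupSize = 64;
  //   vk::SpecializationMapEntry entry( 0, 0, sizeof( uint32_t ) );
  //   vk::SpecializationInfo specInfo( 1, &entry, sizeof( workGroupSize ), &workGroupSize );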
3768
3769 union ClearColorValue
3770 {
3771 ClearColorValue( const std::array<float,4>& float32_ = { {0} } )
3772 {
3773 memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
3774 }
3775
3776 ClearColorValue( const std::array<int32_t,4>& int32_ )
3777 {
3778 memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
3779 }
3780
3781 ClearColorValue( const std::array<uint32_t,4>& uint32_ )
3782 {
3783 memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
3784 }
3785
3786 ClearColorValue& setFloat32( std::array<float,4> float32_ )
3787 {
3788 memcpy( &float32, float32_.data(), 4 * sizeof( float ) );
3789 return *this;
3790 }
3791
3792 ClearColorValue& setInt32( std::array<int32_t,4> int32_ )
3793 {
3794 memcpy( &int32, int32_.data(), 4 * sizeof( int32_t ) );
3795 return *this;
3796 }
3797
3798 ClearColorValue& setUint32( std::array<uint32_t,4> uint32_ )
3799 {
3800 memcpy( &uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
3801 return *this;
3802 }
3803
3804 operator VkClearColorValue const& () const
3805 {
3806 return *reinterpret_cast<const VkClearColorValue*>(this);
3807 }
3808
3809 float float32[4];
3810 int32_t int32[4];
3811 uint32_t uint32[4];
3812 };
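  // ClearColorValue carries the clear color in whichever of the three member formats
  // matches the image; the constructors above select the member from the array type.
  // For example, an opaque black clear color for a floating-point format:
  //
  //   vk::ClearColorValue black( std::array<float,4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } );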
3813
3814 struct ClearDepthStencilValue
3815 {
3816 ClearDepthStencilValue( float depth_ = 0, uint32_t stencil_ = 0 )
3817 : depth( depth_ )
3818 , stencil( stencil_ )
3819 {
3820 }
3821
3822 ClearDepthStencilValue( VkClearDepthStencilValue const & rhs )
3823 {
3824 memcpy( this, &rhs, sizeof(ClearDepthStencilValue) );
3825 }
3826
3827 ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs )
3828 {
3829 memcpy( this, &rhs, sizeof(ClearDepthStencilValue) );
3830 return *this;
3831 }
3832
3833 ClearDepthStencilValue& setDepth( float depth_ )
3834 {
3835 depth = depth_;
3836 return *this;
3837 }
3838
3839 ClearDepthStencilValue& setStencil( uint32_t stencil_ )
3840 {
3841 stencil = stencil_;
3842 return *this;
3843 }
3844
3845 operator const VkClearDepthStencilValue&() const
3846 {
3847 return *reinterpret_cast<const VkClearDepthStencilValue*>(this);
3848 }
3849
3850 bool operator==( ClearDepthStencilValue const& rhs ) const
3851 {
3852 return ( depth == rhs.depth )
3853 && ( stencil == rhs.stencil );
3854 }
3855
3856 bool operator!=( ClearDepthStencilValue const& rhs ) const
3857 {
3858 return !operator==( rhs );
3859 }
3860
3861 float depth;
3862 uint32_t stencil;
3863 };
3864 static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
3865
3866 union ClearValue
3867 {
3868 ClearValue( ClearColorValue color_ = ClearColorValue() )
3869 {
3870 color = color_;
3871 }
3872
3873 ClearValue( ClearDepthStencilValue depthStencil_ )
3874 {
3875 depthStencil = depthStencil_;
3876 }
3877
3878 ClearValue& setColor( ClearColorValue color_ )
3879 {
3880 color = color_;
3881 return *this;
3882 }
3883
3884 ClearValue& setDepthStencil( ClearDepthStencilValue depthStencil_ )
3885 {
3886 depthStencil = depthStencil_;
3887 return *this;
3888 }
3889
3890 operator VkClearValue const& () const
3891 {
3892 return *reinterpret_cast<const VkClearValue*>(this);
3893 }
3894
3895#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
3896 ClearColorValue color;
3897 ClearDepthStencilValue depthStencil;
3898#else
3899 VkClearColorValue color;
3900 VkClearDepthStencilValue depthStencil;
3901#endif // VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
3902 };
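  // ClearValue holds either a color or a depth/stencil clear, one entry per attachment
  // being cleared. A minimal sketch for a color attachment followed by a depth/stencil
  // attachment:
  //
  //   vk::ClearValue clearValues[2];
  //   clearValues[0].setColor( vk::ClearColorValue( std::array<float,4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  //   clearValues[1].setDepthStencil( vk::ClearDepthStencilValue( 1.0f, 0 ) );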
3903
3904 struct PhysicalDeviceFeatures
3905 {
3906 PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = 0, Bool32 fullDrawIndexUint32_ = 0, Bool32 imageCubeArray_ = 0, Bool32 independentBlend_ = 0, Bool32 geometryShader_ = 0, Bool32 tessellationShader_ = 0, Bool32 sampleRateShading_ = 0, Bool32 dualSrcBlend_ = 0, Bool32 logicOp_ = 0, Bool32 multiDrawIndirect_ = 0, Bool32 drawIndirectFirstInstance_ = 0, Bool32 depthClamp_ = 0, Bool32 depthBiasClamp_ = 0, Bool32 fillModeNonSolid_ = 0, Bool32 depthBounds_ = 0, Bool32 wideLines_ = 0, Bool32 largePoints_ = 0, Bool32 alphaToOne_ = 0, Bool32 multiViewport_ = 0, Bool32 samplerAnisotropy_ = 0, Bool32 textureCompressionETC2_ = 0, Bool32 textureCompressionASTC_LDR_ = 0, Bool32 textureCompressionBC_ = 0, Bool32 occlusionQueryPrecise_ = 0, Bool32 pipelineStatisticsQuery_ = 0, Bool32 vertexPipelineStoresAndAtomics_ = 0, Bool32 fragmentStoresAndAtomics_ = 0, Bool32 shaderTessellationAndGeometryPointSize_ = 0, Bool32 shaderImageGatherExtended_ = 0, Bool32 shaderStorageImageExtendedFormats_ = 0, Bool32 shaderStorageImageMultisample_ = 0, Bool32 shaderStorageImageReadWithoutFormat_ = 0, Bool32 shaderStorageImageWriteWithoutFormat_ = 0, Bool32 shaderUniformBufferArrayDynamicIndexing_ = 0, Bool32 shaderSampledImageArrayDynamicIndexing_ = 0, Bool32 shaderStorageBufferArrayDynamicIndexing_ = 0, Bool32 shaderStorageImageArrayDynamicIndexing_ = 0, Bool32 shaderClipDistance_ = 0, Bool32 shaderCullDistance_ = 0, Bool32 shaderFloat64_ = 0, Bool32 shaderInt64_ = 0, Bool32 shaderInt16_ = 0, Bool32 shaderResourceResidency_ = 0, Bool32 shaderResourceMinLod_ = 0, Bool32 sparseBinding_ = 0, Bool32 sparseResidencyBuffer_ = 0, Bool32 sparseResidencyImage2D_ = 0, Bool32 sparseResidencyImage3D_ = 0, Bool32 sparseResidency2Samples_ = 0, Bool32 sparseResidency4Samples_ = 0, Bool32 sparseResidency8Samples_ = 0, Bool32 sparseResidency16Samples_ = 0, Bool32 sparseResidencyAliased_ = 0, Bool32 variableMultisampleRate_ = 0, Bool32 inheritedQueries_ = 0 )
3907 : robustBufferAccess( robustBufferAccess_ )
3908 , fullDrawIndexUint32( fullDrawIndexUint32_ )
3909 , imageCubeArray( imageCubeArray_ )
3910 , independentBlend( independentBlend_ )
3911 , geometryShader( geometryShader_ )
3912 , tessellationShader( tessellationShader_ )
3913 , sampleRateShading( sampleRateShading_ )
3914 , dualSrcBlend( dualSrcBlend_ )
3915 , logicOp( logicOp_ )
3916 , multiDrawIndirect( multiDrawIndirect_ )
3917 , drawIndirectFirstInstance( drawIndirectFirstInstance_ )
3918 , depthClamp( depthClamp_ )
3919 , depthBiasClamp( depthBiasClamp_ )
3920 , fillModeNonSolid( fillModeNonSolid_ )
3921 , depthBounds( depthBounds_ )
3922 , wideLines( wideLines_ )
3923 , largePoints( largePoints_ )
3924 , alphaToOne( alphaToOne_ )
3925 , multiViewport( multiViewport_ )
3926 , samplerAnisotropy( samplerAnisotropy_ )
3927 , textureCompressionETC2( textureCompressionETC2_ )
3928 , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ )
3929 , textureCompressionBC( textureCompressionBC_ )
3930 , occlusionQueryPrecise( occlusionQueryPrecise_ )
3931 , pipelineStatisticsQuery( pipelineStatisticsQuery_ )
3932 , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ )
3933 , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ )
3934 , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ )
3935 , shaderImageGatherExtended( shaderImageGatherExtended_ )
3936 , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ )
3937 , shaderStorageImageMultisample( shaderStorageImageMultisample_ )
3938 , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ )
3939 , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ )
3940 , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ )
3941 , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ )
3942 , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ )
3943 , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ )
3944 , shaderClipDistance( shaderClipDistance_ )
3945 , shaderCullDistance( shaderCullDistance_ )
3946 , shaderFloat64( shaderFloat64_ )
3947 , shaderInt64( shaderInt64_ )
3948 , shaderInt16( shaderInt16_ )
3949 , shaderResourceResidency( shaderResourceResidency_ )
3950 , shaderResourceMinLod( shaderResourceMinLod_ )
3951 , sparseBinding( sparseBinding_ )
3952 , sparseResidencyBuffer( sparseResidencyBuffer_ )
3953 , sparseResidencyImage2D( sparseResidencyImage2D_ )
3954 , sparseResidencyImage3D( sparseResidencyImage3D_ )
3955 , sparseResidency2Samples( sparseResidency2Samples_ )
3956 , sparseResidency4Samples( sparseResidency4Samples_ )
3957 , sparseResidency8Samples( sparseResidency8Samples_ )
3958 , sparseResidency16Samples( sparseResidency16Samples_ )
3959 , sparseResidencyAliased( sparseResidencyAliased_ )
3960 , variableMultisampleRate( variableMultisampleRate_ )
3961 , inheritedQueries( inheritedQueries_ )
3962 {
3963 }
3964
3965 PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs )
3966 {
3967 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures) );
3968 }
3969
3970 PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs )
3971 {
3972 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures) );
3973 return *this;
3974 }
3975
3976 PhysicalDeviceFeatures& setRobustBufferAccess( Bool32 robustBufferAccess_ )
3977 {
3978 robustBufferAccess = robustBufferAccess_;
3979 return *this;
3980 }
3981
3982 PhysicalDeviceFeatures& setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ )
3983 {
3984 fullDrawIndexUint32 = fullDrawIndexUint32_;
3985 return *this;
3986 }
3987
3988 PhysicalDeviceFeatures& setImageCubeArray( Bool32 imageCubeArray_ )
3989 {
3990 imageCubeArray = imageCubeArray_;
3991 return *this;
3992 }
3993
3994 PhysicalDeviceFeatures& setIndependentBlend( Bool32 independentBlend_ )
3995 {
3996 independentBlend = independentBlend_;
3997 return *this;
3998 }
3999
4000 PhysicalDeviceFeatures& setGeometryShader( Bool32 geometryShader_ )
4001 {
4002 geometryShader = geometryShader_;
4003 return *this;
4004 }
4005
4006 PhysicalDeviceFeatures& setTessellationShader( Bool32 tessellationShader_ )
4007 {
4008 tessellationShader = tessellationShader_;
4009 return *this;
4010 }
4011
4012 PhysicalDeviceFeatures& setSampleRateShading( Bool32 sampleRateShading_ )
4013 {
4014 sampleRateShading = sampleRateShading_;
4015 return *this;
4016 }
4017
4018 PhysicalDeviceFeatures& setDualSrcBlend( Bool32 dualSrcBlend_ )
4019 {
4020 dualSrcBlend = dualSrcBlend_;
4021 return *this;
4022 }
4023
4024 PhysicalDeviceFeatures& setLogicOp( Bool32 logicOp_ )
4025 {
4026 logicOp = logicOp_;
4027 return *this;
4028 }
4029
4030 PhysicalDeviceFeatures& setMultiDrawIndirect( Bool32 multiDrawIndirect_ )
4031 {
4032 multiDrawIndirect = multiDrawIndirect_;
4033 return *this;
4034 }
4035
4036 PhysicalDeviceFeatures& setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ )
4037 {
4038 drawIndirectFirstInstance = drawIndirectFirstInstance_;
4039 return *this;
4040 }
4041
4042 PhysicalDeviceFeatures& setDepthClamp( Bool32 depthClamp_ )
4043 {
4044 depthClamp = depthClamp_;
4045 return *this;
4046 }
4047
4048 PhysicalDeviceFeatures& setDepthBiasClamp( Bool32 depthBiasClamp_ )
4049 {
4050 depthBiasClamp = depthBiasClamp_;
4051 return *this;
4052 }
4053
4054 PhysicalDeviceFeatures& setFillModeNonSolid( Bool32 fillModeNonSolid_ )
4055 {
4056 fillModeNonSolid = fillModeNonSolid_;
4057 return *this;
4058 }
4059
4060 PhysicalDeviceFeatures& setDepthBounds( Bool32 depthBounds_ )
4061 {
4062 depthBounds = depthBounds_;
4063 return *this;
4064 }
4065
4066 PhysicalDeviceFeatures& setWideLines( Bool32 wideLines_ )
4067 {
4068 wideLines = wideLines_;
4069 return *this;
4070 }
4071
4072 PhysicalDeviceFeatures& setLargePoints( Bool32 largePoints_ )
4073 {
4074 largePoints = largePoints_;
4075 return *this;
4076 }
4077
4078 PhysicalDeviceFeatures& setAlphaToOne( Bool32 alphaToOne_ )
4079 {
4080 alphaToOne = alphaToOne_;
4081 return *this;
4082 }
4083
4084 PhysicalDeviceFeatures& setMultiViewport( Bool32 multiViewport_ )
4085 {
4086 multiViewport = multiViewport_;
4087 return *this;
4088 }
4089
4090 PhysicalDeviceFeatures& setSamplerAnisotropy( Bool32 samplerAnisotropy_ )
4091 {
4092 samplerAnisotropy = samplerAnisotropy_;
4093 return *this;
4094 }
4095
4096 PhysicalDeviceFeatures& setTextureCompressionETC2( Bool32 textureCompressionETC2_ )
4097 {
4098 textureCompressionETC2 = textureCompressionETC2_;
4099 return *this;
4100 }
4101
4102 PhysicalDeviceFeatures& setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ )
4103 {
4104 textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
4105 return *this;
4106 }
4107
4108 PhysicalDeviceFeatures& setTextureCompressionBC( Bool32 textureCompressionBC_ )
4109 {
4110 textureCompressionBC = textureCompressionBC_;
4111 return *this;
4112 }
4113
4114 PhysicalDeviceFeatures& setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ )
4115 {
4116 occlusionQueryPrecise = occlusionQueryPrecise_;
4117 return *this;
4118 }
4119
4120 PhysicalDeviceFeatures& setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ )
4121 {
4122 pipelineStatisticsQuery = pipelineStatisticsQuery_;
4123 return *this;
4124 }
4125
4126 PhysicalDeviceFeatures& setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ )
4127 {
4128 vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
4129 return *this;
4130 }
4131
4132 PhysicalDeviceFeatures& setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ )
4133 {
4134 fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
4135 return *this;
4136 }
4137
4138 PhysicalDeviceFeatures& setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ )
4139 {
4140 shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
4141 return *this;
4142 }
4143
4144 PhysicalDeviceFeatures& setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ )
4145 {
4146 shaderImageGatherExtended = shaderImageGatherExtended_;
4147 return *this;
4148 }
4149
4150 PhysicalDeviceFeatures& setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ )
4151 {
4152 shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
4153 return *this;
4154 }
4155
4156 PhysicalDeviceFeatures& setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ )
4157 {
4158 shaderStorageImageMultisample = shaderStorageImageMultisample_;
4159 return *this;
4160 }
4161
4162 PhysicalDeviceFeatures& setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ )
4163 {
4164 shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
4165 return *this;
4166 }
4167
4168 PhysicalDeviceFeatures& setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ )
4169 {
4170 shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
4171 return *this;
4172 }
4173
4174 PhysicalDeviceFeatures& setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ )
4175 {
4176 shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
4177 return *this;
4178 }
4179
4180 PhysicalDeviceFeatures& setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ )
4181 {
4182 shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
4183 return *this;
4184 }
4185
4186 PhysicalDeviceFeatures& setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ )
4187 {
4188 shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
4189 return *this;
4190 }
4191
4192 PhysicalDeviceFeatures& setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ )
4193 {
4194 shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
4195 return *this;
4196 }
4197
4198 PhysicalDeviceFeatures& setShaderClipDistance( Bool32 shaderClipDistance_ )
4199 {
4200 shaderClipDistance = shaderClipDistance_;
4201 return *this;
4202 }
4203
4204 PhysicalDeviceFeatures& setShaderCullDistance( Bool32 shaderCullDistance_ )
4205 {
4206 shaderCullDistance = shaderCullDistance_;
4207 return *this;
4208 }
4209
4210 PhysicalDeviceFeatures& setShaderFloat64( Bool32 shaderFloat64_ )
4211 {
4212 shaderFloat64 = shaderFloat64_;
4213 return *this;
4214 }
4215
4216 PhysicalDeviceFeatures& setShaderInt64( Bool32 shaderInt64_ )
4217 {
4218 shaderInt64 = shaderInt64_;
4219 return *this;
4220 }
4221
4222 PhysicalDeviceFeatures& setShaderInt16( Bool32 shaderInt16_ )
4223 {
4224 shaderInt16 = shaderInt16_;
4225 return *this;
4226 }
4227
4228 PhysicalDeviceFeatures& setShaderResourceResidency( Bool32 shaderResourceResidency_ )
4229 {
4230 shaderResourceResidency = shaderResourceResidency_;
4231 return *this;
4232 }
4233
4234 PhysicalDeviceFeatures& setShaderResourceMinLod( Bool32 shaderResourceMinLod_ )
4235 {
4236 shaderResourceMinLod = shaderResourceMinLod_;
4237 return *this;
4238 }
4239
4240 PhysicalDeviceFeatures& setSparseBinding( Bool32 sparseBinding_ )
4241 {
4242 sparseBinding = sparseBinding_;
4243 return *this;
4244 }
4245
4246 PhysicalDeviceFeatures& setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ )
4247 {
4248 sparseResidencyBuffer = sparseResidencyBuffer_;
4249 return *this;
4250 }
4251
4252 PhysicalDeviceFeatures& setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ )
4253 {
4254 sparseResidencyImage2D = sparseResidencyImage2D_;
4255 return *this;
4256 }
4257
4258 PhysicalDeviceFeatures& setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ )
4259 {
4260 sparseResidencyImage3D = sparseResidencyImage3D_;
4261 return *this;
4262 }
4263
4264 PhysicalDeviceFeatures& setSparseResidency2Samples( Bool32 sparseResidency2Samples_ )
4265 {
4266 sparseResidency2Samples = sparseResidency2Samples_;
4267 return *this;
4268 }
4269
4270 PhysicalDeviceFeatures& setSparseResidency4Samples( Bool32 sparseResidency4Samples_ )
4271 {
4272 sparseResidency4Samples = sparseResidency4Samples_;
4273 return *this;
4274 }
4275
4276 PhysicalDeviceFeatures& setSparseResidency8Samples( Bool32 sparseResidency8Samples_ )
4277 {
4278 sparseResidency8Samples = sparseResidency8Samples_;
4279 return *this;
4280 }
4281
4282 PhysicalDeviceFeatures& setSparseResidency16Samples( Bool32 sparseResidency16Samples_ )
4283 {
4284 sparseResidency16Samples = sparseResidency16Samples_;
4285 return *this;
4286 }
4287
4288 PhysicalDeviceFeatures& setSparseResidencyAliased( Bool32 sparseResidencyAliased_ )
4289 {
4290 sparseResidencyAliased = sparseResidencyAliased_;
4291 return *this;
4292 }
4293
4294 PhysicalDeviceFeatures& setVariableMultisampleRate( Bool32 variableMultisampleRate_ )
4295 {
4296 variableMultisampleRate = variableMultisampleRate_;
4297 return *this;
4298 }
4299
4300 PhysicalDeviceFeatures& setInheritedQueries( Bool32 inheritedQueries_ )
4301 {
4302 inheritedQueries = inheritedQueries_;
4303 return *this;
4304 }
4305
4306 operator const VkPhysicalDeviceFeatures&() const
4307 {
4308 return *reinterpret_cast<const VkPhysicalDeviceFeatures*>(this);
4309 }
4310
4311 bool operator==( PhysicalDeviceFeatures const& rhs ) const
4312 {
4313 return ( robustBufferAccess == rhs.robustBufferAccess )
4314 && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
4315 && ( imageCubeArray == rhs.imageCubeArray )
4316 && ( independentBlend == rhs.independentBlend )
4317 && ( geometryShader == rhs.geometryShader )
4318 && ( tessellationShader == rhs.tessellationShader )
4319 && ( sampleRateShading == rhs.sampleRateShading )
4320 && ( dualSrcBlend == rhs.dualSrcBlend )
4321 && ( logicOp == rhs.logicOp )
4322 && ( multiDrawIndirect == rhs.multiDrawIndirect )
4323 && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
4324 && ( depthClamp == rhs.depthClamp )
4325 && ( depthBiasClamp == rhs.depthBiasClamp )
4326 && ( fillModeNonSolid == rhs.fillModeNonSolid )
4327 && ( depthBounds == rhs.depthBounds )
4328 && ( wideLines == rhs.wideLines )
4329 && ( largePoints == rhs.largePoints )
4330 && ( alphaToOne == rhs.alphaToOne )
4331 && ( multiViewport == rhs.multiViewport )
4332 && ( samplerAnisotropy == rhs.samplerAnisotropy )
4333 && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
4334 && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
4335 && ( textureCompressionBC == rhs.textureCompressionBC )
4336 && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
4337 && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
4338 && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
4339 && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
4340 && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
4341 && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
4342 && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
4343 && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
4344 && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
4345 && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
4346 && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
4347 && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
4348 && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
4349 && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
4350 && ( shaderClipDistance == rhs.shaderClipDistance )
4351 && ( shaderCullDistance == rhs.shaderCullDistance )
4352 && ( shaderFloat64 == rhs.shaderFloat64 )
4353 && ( shaderInt64 == rhs.shaderInt64 )
4354 && ( shaderInt16 == rhs.shaderInt16 )
4355 && ( shaderResourceResidency == rhs.shaderResourceResidency )
4356 && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
4357 && ( sparseBinding == rhs.sparseBinding )
4358 && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
4359 && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
4360 && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
4361 && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
4362 && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
4363 && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
4364 && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
4365 && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
4366 && ( variableMultisampleRate == rhs.variableMultisampleRate )
4367 && ( inheritedQueries == rhs.inheritedQueries );
4368 }
4369
4370 bool operator!=( PhysicalDeviceFeatures const& rhs ) const
4371 {
4372 return !operator==( rhs );
4373 }
4374
4375 Bool32 robustBufferAccess;
4376 Bool32 fullDrawIndexUint32;
4377 Bool32 imageCubeArray;
4378 Bool32 independentBlend;
4379 Bool32 geometryShader;
4380 Bool32 tessellationShader;
4381 Bool32 sampleRateShading;
4382 Bool32 dualSrcBlend;
4383 Bool32 logicOp;
4384 Bool32 multiDrawIndirect;
4385 Bool32 drawIndirectFirstInstance;
4386 Bool32 depthClamp;
4387 Bool32 depthBiasClamp;
4388 Bool32 fillModeNonSolid;
4389 Bool32 depthBounds;
4390 Bool32 wideLines;
4391 Bool32 largePoints;
4392 Bool32 alphaToOne;
4393 Bool32 multiViewport;
4394 Bool32 samplerAnisotropy;
4395 Bool32 textureCompressionETC2;
4396 Bool32 textureCompressionASTC_LDR;
4397 Bool32 textureCompressionBC;
4398 Bool32 occlusionQueryPrecise;
4399 Bool32 pipelineStatisticsQuery;
4400 Bool32 vertexPipelineStoresAndAtomics;
4401 Bool32 fragmentStoresAndAtomics;
4402 Bool32 shaderTessellationAndGeometryPointSize;
4403 Bool32 shaderImageGatherExtended;
4404 Bool32 shaderStorageImageExtendedFormats;
4405 Bool32 shaderStorageImageMultisample;
4406 Bool32 shaderStorageImageReadWithoutFormat;
4407 Bool32 shaderStorageImageWriteWithoutFormat;
4408 Bool32 shaderUniformBufferArrayDynamicIndexing;
4409 Bool32 shaderSampledImageArrayDynamicIndexing;
4410 Bool32 shaderStorageBufferArrayDynamicIndexing;
4411 Bool32 shaderStorageImageArrayDynamicIndexing;
4412 Bool32 shaderClipDistance;
4413 Bool32 shaderCullDistance;
4414 Bool32 shaderFloat64;
4415 Bool32 shaderInt64;
4416 Bool32 shaderInt16;
4417 Bool32 shaderResourceResidency;
4418 Bool32 shaderResourceMinLod;
4419 Bool32 sparseBinding;
4420 Bool32 sparseResidencyBuffer;
4421 Bool32 sparseResidencyImage2D;
4422 Bool32 sparseResidencyImage3D;
4423 Bool32 sparseResidency2Samples;
4424 Bool32 sparseResidency4Samples;
4425 Bool32 sparseResidency8Samples;
4426 Bool32 sparseResidency16Samples;
4427 Bool32 sparseResidencyAliased;
4428 Bool32 variableMultisampleRate;
4429 Bool32 inheritedQueries;
4430 };
4431 static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
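
  // Illustrative usage sketch (not part of the generated header): enabling a few optional device
  // features through the chained setters declared above before passing the struct on to device
  // creation. Place inside a function body; VK_TRUE comes from <vulkan/vulkan.h>.
  //
  //   vk::PhysicalDeviceFeatures enabledFeatures = vk::PhysicalDeviceFeatures()
  //       .setGeometryShader( VK_TRUE )
  //       .setSamplerAnisotropy( VK_TRUE )
  //       .setFillModeNonSolid( VK_TRUE );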
4432
4433 struct PhysicalDeviceSparseProperties
4434 {
4435 operator const VkPhysicalDeviceSparseProperties&() const
4436 {
4437 return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>(this);
4438 }
4439
4440 bool operator==( PhysicalDeviceSparseProperties const& rhs ) const
4441 {
4442 return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
4443 && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
4444 && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
4445 && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
4446 && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
4447 }
4448
4449 bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const
4450 {
4451 return !operator==( rhs );
4452 }
4453
4454 Bool32 residencyStandard2DBlockShape;
4455 Bool32 residencyStandard2DMultisampleBlockShape;
4456 Bool32 residencyStandard3DBlockShape;
4457 Bool32 residencyAlignedMipSize;
4458 Bool32 residencyNonResidentStrict;
4459 };
4460 static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
4461
4462 struct DrawIndirectCommand
4463 {
4464 DrawIndirectCommand( uint32_t vertexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstVertex_ = 0, uint32_t firstInstance_ = 0 )
4465 : vertexCount( vertexCount_ )
4466 , instanceCount( instanceCount_ )
4467 , firstVertex( firstVertex_ )
4468 , firstInstance( firstInstance_ )
4469 {
4470 }
4471
4472 DrawIndirectCommand( VkDrawIndirectCommand const & rhs )
4473 {
4474 memcpy( this, &rhs, sizeof(DrawIndirectCommand) );
4475 }
4476
4477 DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs )
4478 {
4479 memcpy( this, &rhs, sizeof(DrawIndirectCommand) );
4480 return *this;
4481 }
4482
4483 DrawIndirectCommand& setVertexCount( uint32_t vertexCount_ )
4484 {
4485 vertexCount = vertexCount_;
4486 return *this;
4487 }
4488
4489 DrawIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
4490 {
4491 instanceCount = instanceCount_;
4492 return *this;
4493 }
4494
4495 DrawIndirectCommand& setFirstVertex( uint32_t firstVertex_ )
4496 {
4497 firstVertex = firstVertex_;
4498 return *this;
4499 }
4500
4501 DrawIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
4502 {
4503 firstInstance = firstInstance_;
4504 return *this;
4505 }
4506
4507 operator const VkDrawIndirectCommand&() const
4508 {
4509 return *reinterpret_cast<const VkDrawIndirectCommand*>(this);
4510 }
4511
4512 bool operator==( DrawIndirectCommand const& rhs ) const
4513 {
4514 return ( vertexCount == rhs.vertexCount )
4515 && ( instanceCount == rhs.instanceCount )
4516 && ( firstVertex == rhs.firstVertex )
4517 && ( firstInstance == rhs.firstInstance );
4518 }
4519
4520 bool operator!=( DrawIndirectCommand const& rhs ) const
4521 {
4522 return !operator==( rhs );
4523 }
4524
4525 uint32_t vertexCount;
4526 uint32_t instanceCount;
4527 uint32_t firstVertex;
4528 uint32_t firstInstance;
4529 };
4530 static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
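
  // Illustrative usage sketch (not part of the generated header): filling one indirect draw record,
  // e.g. before copying it into a buffer later consumed by vkCmdDrawIndirect. Uses only the setters
  // declared above; place inside a function body.
  //
  //   vk::DrawIndirectCommand draw = vk::DrawIndirectCommand()
  //       .setVertexCount( 3 )
  //       .setInstanceCount( 1 )
  //       .setFirstVertex( 0 )
  //       .setFirstInstance( 0 );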
4531
4532 struct DrawIndexedIndirectCommand
4533 {
4534 DrawIndexedIndirectCommand( uint32_t indexCount_ = 0, uint32_t instanceCount_ = 0, uint32_t firstIndex_ = 0, int32_t vertexOffset_ = 0, uint32_t firstInstance_ = 0 )
4535 : indexCount( indexCount_ )
4536 , instanceCount( instanceCount_ )
4537 , firstIndex( firstIndex_ )
4538 , vertexOffset( vertexOffset_ )
4539 , firstInstance( firstInstance_ )
4540 {
4541 }
4542
4543 DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs )
4544 {
4545 memcpy( this, &rhs, sizeof(DrawIndexedIndirectCommand) );
4546 }
4547
4548 DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs )
4549 {
4550 memcpy( this, &rhs, sizeof(DrawIndexedIndirectCommand) );
4551 return *this;
4552 }
4553
4554 DrawIndexedIndirectCommand& setIndexCount( uint32_t indexCount_ )
4555 {
4556 indexCount = indexCount_;
4557 return *this;
4558 }
4559
4560 DrawIndexedIndirectCommand& setInstanceCount( uint32_t instanceCount_ )
4561 {
4562 instanceCount = instanceCount_;
4563 return *this;
4564 }
4565
4566 DrawIndexedIndirectCommand& setFirstIndex( uint32_t firstIndex_ )
4567 {
4568 firstIndex = firstIndex_;
4569 return *this;
4570 }
4571
4572 DrawIndexedIndirectCommand& setVertexOffset( int32_t vertexOffset_ )
4573 {
4574 vertexOffset = vertexOffset_;
4575 return *this;
4576 }
4577
4578 DrawIndexedIndirectCommand& setFirstInstance( uint32_t firstInstance_ )
4579 {
4580 firstInstance = firstInstance_;
4581 return *this;
4582 }
4583
4584 operator const VkDrawIndexedIndirectCommand&() const
4585 {
4586 return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>(this);
4587 }
4588
4589 bool operator==( DrawIndexedIndirectCommand const& rhs ) const
4590 {
4591 return ( indexCount == rhs.indexCount )
4592 && ( instanceCount == rhs.instanceCount )
4593 && ( firstIndex == rhs.firstIndex )
4594 && ( vertexOffset == rhs.vertexOffset )
4595 && ( firstInstance == rhs.firstInstance );
4596 }
4597
4598 bool operator!=( DrawIndexedIndirectCommand const& rhs ) const
4599 {
4600 return !operator==( rhs );
4601 }
4602
4603 uint32_t indexCount;
4604 uint32_t instanceCount;
4605 uint32_t firstIndex;
4606 int32_t vertexOffset;
4607 uint32_t firstInstance;
4608 };
4609 static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
4610
4611 struct DispatchIndirectCommand
4612 {
4613 DispatchIndirectCommand( uint32_t x_ = 0, uint32_t y_ = 0, uint32_t z_ = 0 )
4614 : x( x_ )
4615 , y( y_ )
4616 , z( z_ )
4617 {
4618 }
4619
4620 DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs )
4621 {
4622 memcpy( this, &rhs, sizeof(DispatchIndirectCommand) );
4623 }
4624
4625 DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs )
4626 {
4627 memcpy( this, &rhs, sizeof(DispatchIndirectCommand) );
4628 return *this;
4629 }
4630
4631 DispatchIndirectCommand& setX( uint32_t x_ )
4632 {
4633 x = x_;
4634 return *this;
4635 }
4636
4637 DispatchIndirectCommand& setY( uint32_t y_ )
4638 {
4639 y = y_;
4640 return *this;
4641 }
4642
4643 DispatchIndirectCommand& setZ( uint32_t z_ )
4644 {
4645 z = z_;
4646 return *this;
4647 }
4648
4649 operator const VkDispatchIndirectCommand&() const
4650 {
4651 return *reinterpret_cast<const VkDispatchIndirectCommand*>(this);
4652 }
4653
4654 bool operator==( DispatchIndirectCommand const& rhs ) const
4655 {
4656 return ( x == rhs.x )
4657 && ( y == rhs.y )
4658 && ( z == rhs.z );
4659 }
4660
4661 bool operator!=( DispatchIndirectCommand const& rhs ) const
4662 {
4663 return !operator==( rhs );
4664 }
4665
4666 uint32_t x;
4667 uint32_t y;
4668 uint32_t z;
4669 };
4670 static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
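
  // Illustrative usage sketch (not part of the generated header): a dispatch record for a
  // 256x1x1 workgroup grid, e.g. written into a buffer consumed by vkCmdDispatchIndirect.
  //
  //   vk::DispatchIndirectCommand dispatch = vk::DispatchIndirectCommand()
  //       .setX( 256 )
  //       .setY( 1 )
  //       .setZ( 1 );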
4671
4672 struct DisplayPlanePropertiesKHR
4673 {
4674    operator const VkDisplayPlanePropertiesKHR&() const
4675 {
4676 return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>(this);
4677 }
4678
4679 bool operator==( DisplayPlanePropertiesKHR const& rhs ) const
4680 {
4681 return ( currentDisplay == rhs.currentDisplay )
4682 && ( currentStackIndex == rhs.currentStackIndex );
4683 }
4684
4685 bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const
4686 {
4687 return !operator==( rhs );
4688 }
4689
4690 DisplayKHR currentDisplay;
4691 uint32_t currentStackIndex;
4692 };
4693 static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
4694
4695 struct DisplayModeParametersKHR
4696 {
4697 DisplayModeParametersKHR( Extent2D visibleRegion_ = Extent2D(), uint32_t refreshRate_ = 0 )
4698 : visibleRegion( visibleRegion_ )
4699 , refreshRate( refreshRate_ )
4700 {
4701 }
4702
4703 DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs )
4704 {
4705 memcpy( this, &rhs, sizeof(DisplayModeParametersKHR) );
4706 }
4707
4708 DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs )
4709 {
4710 memcpy( this, &rhs, sizeof(DisplayModeParametersKHR) );
4711 return *this;
4712 }
4713
4714 DisplayModeParametersKHR& setVisibleRegion( Extent2D visibleRegion_ )
4715 {
4716 visibleRegion = visibleRegion_;
4717 return *this;
4718 }
4719
4720 DisplayModeParametersKHR& setRefreshRate( uint32_t refreshRate_ )
4721 {
4722 refreshRate = refreshRate_;
4723 return *this;
4724 }
4725
4726 operator const VkDisplayModeParametersKHR&() const
4727 {
4728 return *reinterpret_cast<const VkDisplayModeParametersKHR*>(this);
4729 }
4730
4731 bool operator==( DisplayModeParametersKHR const& rhs ) const
4732 {
4733 return ( visibleRegion == rhs.visibleRegion )
4734 && ( refreshRate == rhs.refreshRate );
4735 }
4736
4737 bool operator!=( DisplayModeParametersKHR const& rhs ) const
4738 {
4739 return !operator==( rhs );
4740 }
4741
4742 Extent2D visibleRegion;
4743 uint32_t refreshRate;
4744 };
4745 static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
4746
4747 struct DisplayModePropertiesKHR
4748 {
4749    operator const VkDisplayModePropertiesKHR&() const
4750 {
4751 return *reinterpret_cast<const VkDisplayModePropertiesKHR*>(this);
4752 }
4753
4754 bool operator==( DisplayModePropertiesKHR const& rhs ) const
4755 {
4756 return ( displayMode == rhs.displayMode )
4757 && ( parameters == rhs.parameters );
4758 }
4759
4760 bool operator!=( DisplayModePropertiesKHR const& rhs ) const
4761 {
4762 return !operator==( rhs );
4763 }
4764
4765 DisplayModeKHR displayMode;
4766 DisplayModeParametersKHR parameters;
4767 };
4768 static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
4769
4770 enum class ImageLayout
4771 {
4772 eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
4773 eGeneral = VK_IMAGE_LAYOUT_GENERAL,
4774 eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
4775 eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
4776 eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
4777 eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
4778 eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4779 eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4780 ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
4781 ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
4782 };
4783
4784 struct DescriptorImageInfo
4785 {
4786 DescriptorImageInfo( Sampler sampler_ = Sampler(), ImageView imageView_ = ImageView(), ImageLayout imageLayout_ = ImageLayout::eUndefined )
4787 : sampler( sampler_ )
4788 , imageView( imageView_ )
4789 , imageLayout( imageLayout_ )
4790 {
4791 }
4792
4793 DescriptorImageInfo( VkDescriptorImageInfo const & rhs )
4794 {
4795 memcpy( this, &rhs, sizeof(DescriptorImageInfo) );
4796 }
4797
4798 DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs )
4799 {
4800 memcpy( this, &rhs, sizeof(DescriptorImageInfo) );
4801 return *this;
4802 }
4803
4804 DescriptorImageInfo& setSampler( Sampler sampler_ )
4805 {
4806 sampler = sampler_;
4807 return *this;
4808 }
4809
4810 DescriptorImageInfo& setImageView( ImageView imageView_ )
4811 {
4812 imageView = imageView_;
4813 return *this;
4814 }
4815
4816 DescriptorImageInfo& setImageLayout( ImageLayout imageLayout_ )
4817 {
4818 imageLayout = imageLayout_;
4819 return *this;
4820 }
4821
4822 operator const VkDescriptorImageInfo&() const
4823 {
4824 return *reinterpret_cast<const VkDescriptorImageInfo*>(this);
4825 }
4826
4827 bool operator==( DescriptorImageInfo const& rhs ) const
4828 {
4829 return ( sampler == rhs.sampler )
4830 && ( imageView == rhs.imageView )
4831 && ( imageLayout == rhs.imageLayout );
4832 }
4833
4834 bool operator!=( DescriptorImageInfo const& rhs ) const
4835 {
4836 return !operator==( rhs );
4837 }
4838
4839 Sampler sampler;
4840 ImageView imageView;
4841 ImageLayout imageLayout;
4842 };
4843 static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
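
  // Illustrative usage sketch (not part of the generated header): describing a sampled image for a
  // descriptor write. 'sampler' and 'imageView' are placeholder handles the application would have
  // created elsewhere; place inside a function body.
  //
  //   vk::DescriptorImageInfo imageInfo = vk::DescriptorImageInfo()
  //       .setSampler( sampler )
  //       .setImageView( imageView )
  //       .setImageLayout( vk::ImageLayout::eShaderReadOnlyOptimal );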
4844
4845 struct AttachmentReference
4846 {
4847 AttachmentReference( uint32_t attachment_ = 0, ImageLayout layout_ = ImageLayout::eUndefined )
4848 : attachment( attachment_ )
4849 , layout( layout_ )
4850 {
4851 }
4852
4853 AttachmentReference( VkAttachmentReference const & rhs )
4854 {
4855 memcpy( this, &rhs, sizeof(AttachmentReference) );
4856 }
4857
4858 AttachmentReference& operator=( VkAttachmentReference const & rhs )
4859 {
4860 memcpy( this, &rhs, sizeof(AttachmentReference) );
4861 return *this;
4862 }
4863
4864 AttachmentReference& setAttachment( uint32_t attachment_ )
4865 {
4866 attachment = attachment_;
4867 return *this;
4868 }
4869
4870 AttachmentReference& setLayout( ImageLayout layout_ )
4871 {
4872 layout = layout_;
4873 return *this;
4874 }
4875
4876 operator const VkAttachmentReference&() const
4877 {
4878 return *reinterpret_cast<const VkAttachmentReference*>(this);
4879 }
4880
4881 bool operator==( AttachmentReference const& rhs ) const
4882 {
4883 return ( attachment == rhs.attachment )
4884 && ( layout == rhs.layout );
4885 }
4886
4887 bool operator!=( AttachmentReference const& rhs ) const
4888 {
4889 return !operator==( rhs );
4890 }
4891
4892 uint32_t attachment;
4893 ImageLayout layout;
4894 };
4895 static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
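
  // Illustrative usage sketch (not part of the generated header): referencing attachment 0 of a
  // render pass in the layout it should be in while the subpass executes.
  //
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );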
4896
4897 enum class AttachmentLoadOp
4898 {
4899 eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
4900 eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
4901 eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE
4902 };
4903
4904 enum class AttachmentStoreOp
4905 {
4906 eStore = VK_ATTACHMENT_STORE_OP_STORE,
4907 eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE
4908 };
4909
4910 enum class ImageType
4911 {
4912 e1D = VK_IMAGE_TYPE_1D,
4913 e2D = VK_IMAGE_TYPE_2D,
4914 e3D = VK_IMAGE_TYPE_3D
4915 };
4916
4917 enum class ImageTiling
4918 {
4919 eOptimal = VK_IMAGE_TILING_OPTIMAL,
4920 eLinear = VK_IMAGE_TILING_LINEAR
4921 };
4922
4923 enum class ImageViewType
4924 {
4925 e1D = VK_IMAGE_VIEW_TYPE_1D,
4926 e2D = VK_IMAGE_VIEW_TYPE_2D,
4927 e3D = VK_IMAGE_VIEW_TYPE_3D,
4928 eCube = VK_IMAGE_VIEW_TYPE_CUBE,
4929 e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
4930 e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
4931 eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
4932 };
4933
4934 enum class CommandBufferLevel
4935 {
4936 ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
4937 eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
4938 };
4939
4940 enum class ComponentSwizzle
4941 {
4942 eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
4943 eZero = VK_COMPONENT_SWIZZLE_ZERO,
4944 eOne = VK_COMPONENT_SWIZZLE_ONE,
4945 eR = VK_COMPONENT_SWIZZLE_R,
4946 eG = VK_COMPONENT_SWIZZLE_G,
4947 eB = VK_COMPONENT_SWIZZLE_B,
4948 eA = VK_COMPONENT_SWIZZLE_A
4949 };
4950
4951 struct ComponentMapping
4952 {
4953 ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity, ComponentSwizzle g_ = ComponentSwizzle::eIdentity, ComponentSwizzle b_ = ComponentSwizzle::eIdentity, ComponentSwizzle a_ = ComponentSwizzle::eIdentity )
4954 : r( r_ )
4955 , g( g_ )
4956 , b( b_ )
4957 , a( a_ )
4958 {
4959 }
4960
4961 ComponentMapping( VkComponentMapping const & rhs )
4962 {
4963 memcpy( this, &rhs, sizeof(ComponentMapping) );
4964 }
4965
4966 ComponentMapping& operator=( VkComponentMapping const & rhs )
4967 {
4968 memcpy( this, &rhs, sizeof(ComponentMapping) );
4969 return *this;
4970 }
4971
4972 ComponentMapping& setR( ComponentSwizzle r_ )
4973 {
4974 r = r_;
4975 return *this;
4976 }
4977
4978 ComponentMapping& setG( ComponentSwizzle g_ )
4979 {
4980 g = g_;
4981 return *this;
4982 }
4983
4984 ComponentMapping& setB( ComponentSwizzle b_ )
4985 {
4986 b = b_;
4987 return *this;
4988 }
4989
4990 ComponentMapping& setA( ComponentSwizzle a_ )
4991 {
4992 a = a_;
4993 return *this;
4994 }
4995
4996 operator const VkComponentMapping&() const
4997 {
4998 return *reinterpret_cast<const VkComponentMapping*>(this);
4999 }
5000
5001 bool operator==( ComponentMapping const& rhs ) const
5002 {
5003 return ( r == rhs.r )
5004 && ( g == rhs.g )
5005 && ( b == rhs.b )
5006 && ( a == rhs.a );
5007 }
5008
5009 bool operator!=( ComponentMapping const& rhs ) const
5010 {
5011 return !operator==( rhs );
5012 }
5013
5014 ComponentSwizzle r;
5015 ComponentSwizzle g;
5016 ComponentSwizzle b;
5017 ComponentSwizzle a;
5018 };
5019 static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
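
  // Illustrative usage sketch (not part of the generated header): a swizzle that forces the alpha
  // channel to one while passing the color channels through, using the constructor declared above.
  //
  //   vk::ComponentMapping swizzle( vk::ComponentSwizzle::eR,
  //                                 vk::ComponentSwizzle::eG,
  //                                 vk::ComponentSwizzle::eB,
  //                                 vk::ComponentSwizzle::eOne );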
5020
5021 enum class DescriptorType
5022 {
5023 eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
5024 eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
5025 eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
5026 eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
5027 eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5028 eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
5029 eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
5030 eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5031 eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
5032 eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
5033 eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
5034 };
5035
5036 struct DescriptorPoolSize
5037 {
5038 DescriptorPoolSize( DescriptorType type_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0 )
5039 : type( type_ )
5040 , descriptorCount( descriptorCount_ )
5041 {
5042 }
5043
5044 DescriptorPoolSize( VkDescriptorPoolSize const & rhs )
5045 {
5046 memcpy( this, &rhs, sizeof(DescriptorPoolSize) );
5047 }
5048
5049 DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs )
5050 {
5051 memcpy( this, &rhs, sizeof(DescriptorPoolSize) );
5052 return *this;
5053 }
5054
5055 DescriptorPoolSize& setType( DescriptorType type_ )
5056 {
5057 type = type_;
5058 return *this;
5059 }
5060
5061 DescriptorPoolSize& setDescriptorCount( uint32_t descriptorCount_ )
5062 {
5063 descriptorCount = descriptorCount_;
5064 return *this;
5065 }
5066
5067 operator const VkDescriptorPoolSize&() const
5068 {
5069 return *reinterpret_cast<const VkDescriptorPoolSize*>(this);
5070 }
5071
5072 bool operator==( DescriptorPoolSize const& rhs ) const
5073 {
5074 return ( type == rhs.type )
5075 && ( descriptorCount == rhs.descriptorCount );
5076 }
5077
5078 bool operator!=( DescriptorPoolSize const& rhs ) const
5079 {
5080 return !operator==( rhs );
5081 }
5082
5083 DescriptorType type;
5084 uint32_t descriptorCount;
5085 };
5086 static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
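
  // Illustrative usage sketch (not part of the generated header): sizing a descriptor pool entry for
  // sixteen combined image samplers, using the constructor declared above.
  //
  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eCombinedImageSampler, 16 );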
5087
5088 enum class QueryType
5089 {
5090 eOcclusion = VK_QUERY_TYPE_OCCLUSION,
5091 ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
5092 eTimestamp = VK_QUERY_TYPE_TIMESTAMP
5093 };
5094
5095 enum class BorderColor
5096 {
5097 eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
5098 eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
5099 eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
5100 eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
5101 eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
5102 eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE
5103 };
5104
5105 enum class PipelineBindPoint
5106 {
5107 eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
5108 eCompute = VK_PIPELINE_BIND_POINT_COMPUTE
5109 };
5110
5111 struct SubpassDescription
5112 {
5113 SubpassDescription( SubpassDescriptionFlags flags_ = SubpassDescriptionFlags(), PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = 0, const AttachmentReference* pInputAttachments_ = nullptr, uint32_t colorAttachmentCount_ = 0, const AttachmentReference* pColorAttachments_ = nullptr, const AttachmentReference* pResolveAttachments_ = nullptr, const AttachmentReference* pDepthStencilAttachment_ = nullptr, uint32_t preserveAttachmentCount_ = 0, const uint32_t* pPreserveAttachments_ = nullptr )
5114 : flags( flags_ )
5115 , pipelineBindPoint( pipelineBindPoint_ )
5116 , inputAttachmentCount( inputAttachmentCount_ )
5117 , pInputAttachments( pInputAttachments_ )
5118 , colorAttachmentCount( colorAttachmentCount_ )
5119 , pColorAttachments( pColorAttachments_ )
5120 , pResolveAttachments( pResolveAttachments_ )
5121 , pDepthStencilAttachment( pDepthStencilAttachment_ )
5122 , preserveAttachmentCount( preserveAttachmentCount_ )
5123 , pPreserveAttachments( pPreserveAttachments_ )
5124 {
5125 }
5126
5127 SubpassDescription( VkSubpassDescription const & rhs )
5128 {
5129 memcpy( this, &rhs, sizeof(SubpassDescription) );
5130 }
5131
5132 SubpassDescription& operator=( VkSubpassDescription const & rhs )
5133 {
5134 memcpy( this, &rhs, sizeof(SubpassDescription) );
5135 return *this;
5136 }
5137
5138 SubpassDescription& setFlags( SubpassDescriptionFlags flags_ )
5139 {
5140 flags = flags_;
5141 return *this;
5142 }
5143
5144 SubpassDescription& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
5145 {
5146 pipelineBindPoint = pipelineBindPoint_;
5147 return *this;
5148 }
5149
5150 SubpassDescription& setInputAttachmentCount( uint32_t inputAttachmentCount_ )
5151 {
5152 inputAttachmentCount = inputAttachmentCount_;
5153 return *this;
5154 }
5155
5156 SubpassDescription& setPInputAttachments( const AttachmentReference* pInputAttachments_ )
5157 {
5158 pInputAttachments = pInputAttachments_;
5159 return *this;
5160 }
5161
5162 SubpassDescription& setColorAttachmentCount( uint32_t colorAttachmentCount_ )
5163 {
5164 colorAttachmentCount = colorAttachmentCount_;
5165 return *this;
5166 }
5167
5168 SubpassDescription& setPColorAttachments( const AttachmentReference* pColorAttachments_ )
5169 {
5170 pColorAttachments = pColorAttachments_;
5171 return *this;
5172 }
5173
5174 SubpassDescription& setPResolveAttachments( const AttachmentReference* pResolveAttachments_ )
5175 {
5176 pResolveAttachments = pResolveAttachments_;
5177 return *this;
5178 }
5179
5180 SubpassDescription& setPDepthStencilAttachment( const AttachmentReference* pDepthStencilAttachment_ )
5181 {
5182 pDepthStencilAttachment = pDepthStencilAttachment_;
5183 return *this;
5184 }
5185
5186 SubpassDescription& setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ )
5187 {
5188 preserveAttachmentCount = preserveAttachmentCount_;
5189 return *this;
5190 }
5191
5192 SubpassDescription& setPPreserveAttachments( const uint32_t* pPreserveAttachments_ )
5193 {
5194 pPreserveAttachments = pPreserveAttachments_;
5195 return *this;
5196 }
5197
5198 operator const VkSubpassDescription&() const
5199 {
5200 return *reinterpret_cast<const VkSubpassDescription*>(this);
5201 }
5202
5203 bool operator==( SubpassDescription const& rhs ) const
5204 {
5205 return ( flags == rhs.flags )
5206 && ( pipelineBindPoint == rhs.pipelineBindPoint )
5207 && ( inputAttachmentCount == rhs.inputAttachmentCount )
5208 && ( pInputAttachments == rhs.pInputAttachments )
5209 && ( colorAttachmentCount == rhs.colorAttachmentCount )
5210 && ( pColorAttachments == rhs.pColorAttachments )
5211 && ( pResolveAttachments == rhs.pResolveAttachments )
5212 && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
5213 && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
5214 && ( pPreserveAttachments == rhs.pPreserveAttachments );
5215 }
5216
5217 bool operator!=( SubpassDescription const& rhs ) const
5218 {
5219 return !operator==( rhs );
5220 }
5221
5222 SubpassDescriptionFlags flags;
5223 PipelineBindPoint pipelineBindPoint;
5224 uint32_t inputAttachmentCount;
5225 const AttachmentReference* pInputAttachments;
5226 uint32_t colorAttachmentCount;
5227 const AttachmentReference* pColorAttachments;
5228 const AttachmentReference* pResolveAttachments;
5229 const AttachmentReference* pDepthStencilAttachment;
5230 uint32_t preserveAttachmentCount;
5231 const uint32_t* pPreserveAttachments;
5232 };
5233 static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
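
  // Illustrative usage sketch (not part of the generated header): a single graphics subpass with one
  // color attachment. 'colorRef' must outlive render pass creation, since only its address is stored;
  // place inside a function body.
  //
  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
  //   vk::SubpassDescription subpass = vk::SubpassDescription()
  //       .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //       .setColorAttachmentCount( 1 )
  //       .setPColorAttachments( &colorRef );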
5234
5235 enum class PipelineCacheHeaderVersion
5236 {
5237 eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
5238 };
5239
5240 enum class PrimitiveTopology
5241 {
5242 ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
5243 eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
5244 eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
5245 eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
5246 eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
5247 eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
5248 eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
5249 eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
5250 eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
5251 eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
5252 ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
5253 };
5254
5255 enum class SharingMode
5256 {
5257 eExclusive = VK_SHARING_MODE_EXCLUSIVE,
5258 eConcurrent = VK_SHARING_MODE_CONCURRENT
5259 };
5260
5261 enum class IndexType
5262 {
5263 eUint16 = VK_INDEX_TYPE_UINT16,
5264 eUint32 = VK_INDEX_TYPE_UINT32
5265 };
5266
5267 enum class Filter
5268 {
5269 eNearest = VK_FILTER_NEAREST,
5270 eLinear = VK_FILTER_LINEAR,
5271 eCubicIMG = VK_FILTER_CUBIC_IMG
5272 };
5273
5274 enum class SamplerMipmapMode
5275 {
5276 eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
5277 eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
5278 };
5279
5280 enum class SamplerAddressMode
5281 {
5282 eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
5283 eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
5284 eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
5285 eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
5286 eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE
5287 };
5288
5289 enum class CompareOp
5290 {
5291 eNever = VK_COMPARE_OP_NEVER,
5292 eLess = VK_COMPARE_OP_LESS,
5293 eEqual = VK_COMPARE_OP_EQUAL,
5294 eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
5295 eGreater = VK_COMPARE_OP_GREATER,
5296 eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
5297 eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
5298 eAlways = VK_COMPARE_OP_ALWAYS
5299 };
5300
5301 enum class PolygonMode
5302 {
5303 eFill = VK_POLYGON_MODE_FILL,
5304 eLine = VK_POLYGON_MODE_LINE,
5305 ePoint = VK_POLYGON_MODE_POINT
5306 };
5307
5308 enum class CullModeFlagBits
5309 {
5310 eNone = VK_CULL_MODE_NONE,
5311 eFront = VK_CULL_MODE_FRONT_BIT,
5312 eBack = VK_CULL_MODE_BACK_BIT,
5313 eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
5314 };
5315
5316 using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
5317
5318    VULKAN_HPP_INLINE CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 )
5319    {
5320 return CullModeFlags( bit0 ) | bit1;
5321 }
5322
5323    VULKAN_HPP_INLINE CullModeFlags operator~( CullModeFlagBits bits )
5324 {
5325 return ~( CullModeFlags( bits ) );
5326 }
5327
5328 template <> struct FlagTraits<CullModeFlagBits>
5329 {
5330 enum
5331 {
5332 allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
5333 };
5334 };
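
  // Illustrative usage sketch (not part of the generated header): combining cull mode bits with the
  // operator| defined above; the result has type vk::CullModeFlags.
  //
  //   vk::CullModeFlags cullBoth = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;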
5335
5336  enum class FrontFace
5337 {
5338 eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
5339 eClockwise = VK_FRONT_FACE_CLOCKWISE
5340 };
5341
5342 enum class BlendFactor
5343 {
5344 eZero = VK_BLEND_FACTOR_ZERO,
5345 eOne = VK_BLEND_FACTOR_ONE,
5346 eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
5347 eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
5348 eDstColor = VK_BLEND_FACTOR_DST_COLOR,
5349 eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
5350 eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
5351 eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
5352 eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
5353 eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
5354 eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
5355 eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
5356 eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
5357 eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
5358 eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
5359 eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
5360 eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
5361 eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
5362 eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
5363 };
5364
5365 enum class BlendOp
5366 {
5367 eAdd = VK_BLEND_OP_ADD,
5368 eSubtract = VK_BLEND_OP_SUBTRACT,
5369 eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
5370 eMin = VK_BLEND_OP_MIN,
5371 eMax = VK_BLEND_OP_MAX
5372 };
5373
5374 enum class StencilOp
5375 {
5376 eKeep = VK_STENCIL_OP_KEEP,
5377 eZero = VK_STENCIL_OP_ZERO,
5378 eReplace = VK_STENCIL_OP_REPLACE,
5379 eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
5380 eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
5381 eInvert = VK_STENCIL_OP_INVERT,
5382 eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
5383 eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
5384 };
5385
5386 struct StencilOpState
5387 {
5388 StencilOpState( StencilOp failOp_ = StencilOp::eKeep, StencilOp passOp_ = StencilOp::eKeep, StencilOp depthFailOp_ = StencilOp::eKeep, CompareOp compareOp_ = CompareOp::eNever, uint32_t compareMask_ = 0, uint32_t writeMask_ = 0, uint32_t reference_ = 0 )
5389 : failOp( failOp_ )
5390 , passOp( passOp_ )
5391 , depthFailOp( depthFailOp_ )
5392 , compareOp( compareOp_ )
5393 , compareMask( compareMask_ )
5394 , writeMask( writeMask_ )
5395 , reference( reference_ )
5396 {
5397 }
5398
5399 StencilOpState( VkStencilOpState const & rhs )
5400 {
5401 memcpy( this, &rhs, sizeof(StencilOpState) );
5402 }
5403
5404 StencilOpState& operator=( VkStencilOpState const & rhs )
5405 {
5406 memcpy( this, &rhs, sizeof(StencilOpState) );
5407 return *this;
5408 }
5409
5410 StencilOpState& setFailOp( StencilOp failOp_ )
5411 {
5412 failOp = failOp_;
5413 return *this;
5414 }
5415
5416 StencilOpState& setPassOp( StencilOp passOp_ )
5417 {
5418 passOp = passOp_;
5419 return *this;
5420 }
5421
5422 StencilOpState& setDepthFailOp( StencilOp depthFailOp_ )
5423 {
5424 depthFailOp = depthFailOp_;
5425 return *this;
5426 }
5427
5428 StencilOpState& setCompareOp( CompareOp compareOp_ )
5429 {
5430 compareOp = compareOp_;
5431 return *this;
5432 }
5433
5434 StencilOpState& setCompareMask( uint32_t compareMask_ )
5435 {
5436 compareMask = compareMask_;
5437 return *this;
5438 }
5439
5440 StencilOpState& setWriteMask( uint32_t writeMask_ )
5441 {
5442 writeMask = writeMask_;
5443 return *this;
5444 }
5445
5446 StencilOpState& setReference( uint32_t reference_ )
5447 {
5448 reference = reference_;
5449 return *this;
5450 }
5451
5452 operator const VkStencilOpState&() const
5453 {
5454 return *reinterpret_cast<const VkStencilOpState*>(this);
5455 }
5456
5457 bool operator==( StencilOpState const& rhs ) const
5458 {
5459 return ( failOp == rhs.failOp )
5460 && ( passOp == rhs.passOp )
5461 && ( depthFailOp == rhs.depthFailOp )
5462 && ( compareOp == rhs.compareOp )
5463 && ( compareMask == rhs.compareMask )
5464 && ( writeMask == rhs.writeMask )
5465 && ( reference == rhs.reference );
5466 }
5467
5468 bool operator!=( StencilOpState const& rhs ) const
5469 {
5470 return !operator==( rhs );
5471 }
5472
5473 StencilOp failOp;
5474 StencilOp passOp;
5475 StencilOp depthFailOp;
5476 CompareOp compareOp;
5477 uint32_t compareMask;
5478 uint32_t writeMask;
5479 uint32_t reference;
5480 };
5481 static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
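
  // Illustrative usage sketch (not part of the generated header): a stencil state that always passes
  // and writes the reference value, built with the chained setters declared above.
  //
  //   vk::StencilOpState stencil = vk::StencilOpState()
  //       .setCompareOp( vk::CompareOp::eAlways )
  //       .setPassOp( vk::StencilOp::eReplace )
  //       .setFailOp( vk::StencilOp::eKeep )
  //       .setReference( 1 );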
5482
5483 enum class LogicOp
5484 {
5485 eClear = VK_LOGIC_OP_CLEAR,
5486 eAnd = VK_LOGIC_OP_AND,
5487 eAndReverse = VK_LOGIC_OP_AND_REVERSE,
5488 eCopy = VK_LOGIC_OP_COPY,
5489 eAndInverted = VK_LOGIC_OP_AND_INVERTED,
5490 eNoOp = VK_LOGIC_OP_NO_OP,
5491 eXor = VK_LOGIC_OP_XOR,
5492 eOr = VK_LOGIC_OP_OR,
5493 eNor = VK_LOGIC_OP_NOR,
5494 eEquivalent = VK_LOGIC_OP_EQUIVALENT,
5495 eInvert = VK_LOGIC_OP_INVERT,
5496 eOrReverse = VK_LOGIC_OP_OR_REVERSE,
5497 eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
5498 eOrInverted = VK_LOGIC_OP_OR_INVERTED,
5499 eNand = VK_LOGIC_OP_NAND,
5500 eSet = VK_LOGIC_OP_SET
5501 };
5502
5503 enum class InternalAllocationType
5504 {
5505 eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
5506 };
5507
5508 enum class SystemAllocationScope
5509 {
5510 eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
5511 eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
5512 eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
5513 eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
5514 eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
5515 };
5516
5517 enum class PhysicalDeviceType
5518 {
5519 eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
5520 eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
5521 eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
5522 eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
5523 eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
5524 };
5525
5526 enum class VertexInputRate
5527 {
5528 eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
5529 eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
5530 };
5531
5532 struct VertexInputBindingDescription
5533 {
5534 VertexInputBindingDescription( uint32_t binding_ = 0, uint32_t stride_ = 0, VertexInputRate inputRate_ = VertexInputRate::eVertex )
5535 : binding( binding_ )
5536 , stride( stride_ )
5537 , inputRate( inputRate_ )
5538 {
5539 }
5540
5541 VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs )
5542 {
5543 memcpy( this, &rhs, sizeof(VertexInputBindingDescription) );
5544 }
5545
5546 VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs )
5547 {
5548 memcpy( this, &rhs, sizeof(VertexInputBindingDescription) );
5549 return *this;
5550 }
5551
5552 VertexInputBindingDescription& setBinding( uint32_t binding_ )
5553 {
5554 binding = binding_;
5555 return *this;
5556 }
5557
5558 VertexInputBindingDescription& setStride( uint32_t stride_ )
5559 {
5560 stride = stride_;
5561 return *this;
5562 }
5563
5564 VertexInputBindingDescription& setInputRate( VertexInputRate inputRate_ )
5565 {
5566 inputRate = inputRate_;
5567 return *this;
5568 }
5569
5570 operator const VkVertexInputBindingDescription&() const
5571 {
5572 return *reinterpret_cast<const VkVertexInputBindingDescription*>(this);
5573 }
5574
5575 bool operator==( VertexInputBindingDescription const& rhs ) const
5576 {
5577 return ( binding == rhs.binding )
5578 && ( stride == rhs.stride )
5579 && ( inputRate == rhs.inputRate );
5580 }
5581
5582 bool operator!=( VertexInputBindingDescription const& rhs ) const
5583 {
5584 return !operator==( rhs );
5585 }
5586
5587 uint32_t binding;
5588 uint32_t stride;
5589 VertexInputRate inputRate;
5590 };
5591 static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
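
  // Illustrative usage sketch (not part of the generated header): one per-vertex input binding whose
  // stride is the size of an application-defined 'Vertex' struct (placeholder name).
  //
  //   vk::VertexInputBindingDescription binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex );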
5592
5593 enum class Format
5594 {
5595 eUndefined = VK_FORMAT_UNDEFINED,
5596 eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
5597 eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
5598 eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
5599 eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
5600 eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
5601 eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
5602 eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
5603 eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
5604 eR8Unorm = VK_FORMAT_R8_UNORM,
5605 eR8Snorm = VK_FORMAT_R8_SNORM,
5606 eR8Uscaled = VK_FORMAT_R8_USCALED,
5607 eR8Sscaled = VK_FORMAT_R8_SSCALED,
5608 eR8Uint = VK_FORMAT_R8_UINT,
5609 eR8Sint = VK_FORMAT_R8_SINT,
5610 eR8Srgb = VK_FORMAT_R8_SRGB,
5611 eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
5612 eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
5613 eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
5614 eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
5615 eR8G8Uint = VK_FORMAT_R8G8_UINT,
5616 eR8G8Sint = VK_FORMAT_R8G8_SINT,
5617 eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
5618 eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
5619 eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
5620 eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
5621 eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
5622 eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
5623 eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
5624 eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
5625 eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
5626 eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
5627 eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
5628 eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
5629 eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
5630 eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
5631 eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
5632 eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
5633 eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
5634 eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
5635 eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
5636 eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
5637 eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
5638 eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
5639 eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
5640 eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
5641 eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
5642 eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
5643 eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
5644 eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
5645 eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
5646 eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
5647 eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
5648 eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
5649 eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
5650 eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
5651 eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
5652 eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
5653 eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
5654 eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
5655 eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
5656 eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
5657 eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
5658 eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
5659 eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
5660 eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
5661 eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
5662 eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
5663 eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
5664 eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
5665 eR16Unorm = VK_FORMAT_R16_UNORM,
5666 eR16Snorm = VK_FORMAT_R16_SNORM,
5667 eR16Uscaled = VK_FORMAT_R16_USCALED,
5668 eR16Sscaled = VK_FORMAT_R16_SSCALED,
5669 eR16Uint = VK_FORMAT_R16_UINT,
5670 eR16Sint = VK_FORMAT_R16_SINT,
5671 eR16Sfloat = VK_FORMAT_R16_SFLOAT,
5672 eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
5673 eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
5674 eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
5675 eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
5676 eR16G16Uint = VK_FORMAT_R16G16_UINT,
5677 eR16G16Sint = VK_FORMAT_R16G16_SINT,
5678 eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
5679 eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
5680 eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
5681 eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
5682 eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
5683 eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
5684 eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
5685 eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
5686 eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
5687 eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
5688 eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
5689 eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
5690 eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
5691 eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
5692 eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
5693 eR32Uint = VK_FORMAT_R32_UINT,
5694 eR32Sint = VK_FORMAT_R32_SINT,
5695 eR32Sfloat = VK_FORMAT_R32_SFLOAT,
5696 eR32G32Uint = VK_FORMAT_R32G32_UINT,
5697 eR32G32Sint = VK_FORMAT_R32G32_SINT,
5698 eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
5699 eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
5700 eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
5701 eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
5702 eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
5703 eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
5704 eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
5705 eR64Uint = VK_FORMAT_R64_UINT,
5706 eR64Sint = VK_FORMAT_R64_SINT,
5707 eR64Sfloat = VK_FORMAT_R64_SFLOAT,
5708 eR64G64Uint = VK_FORMAT_R64G64_UINT,
5709 eR64G64Sint = VK_FORMAT_R64G64_SINT,
5710 eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
5711 eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
5712 eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
5713 eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
5714 eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
5715 eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
5716 eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
5717 eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
5718 eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
5719 eD16Unorm = VK_FORMAT_D16_UNORM,
5720 eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
5721 eD32Sfloat = VK_FORMAT_D32_SFLOAT,
5722 eS8Uint = VK_FORMAT_S8_UINT,
5723 eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
5724 eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
5725 eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
5726 eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
5727 eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
5728 eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
5729 eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
5730 eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
5731 eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
5732 eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
5733 eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
5734 eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
5735 eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
5736 eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
5737 eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
5738 eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
5739 eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
5740 eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
5741 eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
5742 eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
5743 eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
5744 eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
5745 eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
5746 eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
5747 eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
5748 eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
5749 eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
5750 eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
5751 eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
5752 eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
5753 eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
5754 eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
5755 eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
5756 eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
5757 eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
5758 eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
5759 eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
5760 eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
5761 eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
5762 eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
5763 eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
5764 eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
5765 eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
5766 eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
5767 eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
5768 eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
5769 eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
5770 eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
5771 eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
5772 eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
5773 eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
5774 eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
5775 eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
5776 eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
5777 eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
5778 eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
5779 eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
5780 ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
5781 ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
5782 ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
5783 ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
5784 ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
5785 ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
5786 ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
5787 ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG
5788 };
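  // Editorial note (not generated code): vk::Format mirrors VkFormat value for
  // value, so casting between the scoped enum and the C enum is lossless.
  // A minimal sketch, assuming the enclosing vk namespace:
  //
  //   VkFormat raw = static_cast<VkFormat>( vk::Format::eB8G8R8A8Unorm );
  //   vk::Format wrapped = static_cast<vk::Format>( raw );   // round-trips exactly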
5789
5790 struct VertexInputAttributeDescription
5791 {
5792 VertexInputAttributeDescription( uint32_t location_ = 0, uint32_t binding_ = 0, Format format_ = Format::eUndefined, uint32_t offset_ = 0 )
5793 : location( location_ )
5794 , binding( binding_ )
5795 , format( format_ )
5796 , offset( offset_ )
5797 {
5798 }
5799
5800 VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs )
5801 {
5802 memcpy( this, &rhs, sizeof(VertexInputAttributeDescription) );
5803 }
5804
5805 VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs )
5806 {
5807 memcpy( this, &rhs, sizeof(VertexInputAttributeDescription) );
5808 return *this;
5809 }
5810
5811 VertexInputAttributeDescription& setLocation( uint32_t location_ )
5812 {
5813 location = location_;
5814 return *this;
5815 }
5816
5817 VertexInputAttributeDescription& setBinding( uint32_t binding_ )
5818 {
5819 binding = binding_;
5820 return *this;
5821 }
5822
5823 VertexInputAttributeDescription& setFormat( Format format_ )
5824 {
5825 format = format_;
5826 return *this;
5827 }
5828
5829 VertexInputAttributeDescription& setOffset( uint32_t offset_ )
5830 {
5831 offset = offset_;
5832 return *this;
5833 }
5834
5835 operator const VkVertexInputAttributeDescription&() const
5836 {
5837 return *reinterpret_cast<const VkVertexInputAttributeDescription*>(this);
5838 }
5839
5840 bool operator==( VertexInputAttributeDescription const& rhs ) const
5841 {
5842 return ( location == rhs.location )
5843 && ( binding == rhs.binding )
5844 && ( format == rhs.format )
5845 && ( offset == rhs.offset );
5846 }
5847
5848 bool operator!=( VertexInputAttributeDescription const& rhs ) const
5849 {
5850 return !operator==( rhs );
5851 }
5852
5853 uint32_t location;
5854 uint32_t binding;
5855 Format format;
5856 uint32_t offset;
5857 };
5858 static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
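  // Editorial usage sketch (not generated code): describing a vec3 position
  // attribute read from binding 0 at offset 0, using the constructor above. The
  // shader location and the eR32G32B32Sfloat format are assumed for illustration.
  //
  //   vk::VertexInputAttributeDescription positionAttribute( /*location*/ 0,
  //                                                          /*binding*/  0,
  //                                                          vk::Format::eR32G32B32Sfloat,
  //                                                          /*offset*/   0 );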
5859
5860 enum class StructureType
5861 {
5862 eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
5863 eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
5864 eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
5865 eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
5866 eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
5867 eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
5868 eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
5869 eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
5870 eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
5871 eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
5872 eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
5873 eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
5874 eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
5875 eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
5876 eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
5877 eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
5878 eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
5879 ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
5880 ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
5881 ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
5882 ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
5883 ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
5884 ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
5885 ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
5886 ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
5887 ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
5888 ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
5889 ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
5890 eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
5891 eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
5892 ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
5893 eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
5894 eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
5895 eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5896 eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5897 eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5898 eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
5899 eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
5900 eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
5901 eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
5902 eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
5903 eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
5904 eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
5905 eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
5906 eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
5907 eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
5908 eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
5909 eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
5910 eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
5911 eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
5912 ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
5913 eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
5914 eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
5915 eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
5916 eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
5917 eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
5918 eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
5919 eMirSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR,
5920 eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
5921 eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
5922 eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
5923 ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
5924 eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
5925 eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
5926 eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
5927 eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
5928 eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
5929 eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
5930 eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
5931 eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
5932 eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
5933 eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
5934 eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
5935 ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
5936 ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
5937 eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
5938 eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
5939 ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
5940 eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
5941 ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
5942 eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
5943 ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
5944 eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
5945 eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
5946 eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
5947 eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
5948 eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
5949 eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
5950 eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
5951 eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
5952 eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT,
5953 eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
5954 eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
5955 eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
5956 eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT
5957 };
5958
5959 struct ApplicationInfo
5960 {
5961 ApplicationInfo( const char* pApplicationName_ = nullptr, uint32_t applicationVersion_ = 0, const char* pEngineName_ = nullptr, uint32_t engineVersion_ = 0, uint32_t apiVersion_ = 0 )
5962 : sType( StructureType::eApplicationInfo )
5963 , pNext( nullptr )
5964 , pApplicationName( pApplicationName_ )
5965 , applicationVersion( applicationVersion_ )
5966 , pEngineName( pEngineName_ )
5967 , engineVersion( engineVersion_ )
5968 , apiVersion( apiVersion_ )
5969 {
5970 }
5971
5972 ApplicationInfo( VkApplicationInfo const & rhs )
5973 {
5974 memcpy( this, &rhs, sizeof(ApplicationInfo) );
5975 }
5976
5977 ApplicationInfo& operator=( VkApplicationInfo const & rhs )
5978 {
5979 memcpy( this, &rhs, sizeof(ApplicationInfo) );
5980 return *this;
5981 }
5982
5983 ApplicationInfo& setPNext( const void* pNext_ )
5984 {
5985 pNext = pNext_;
5986 return *this;
5987 }
5988
5989 ApplicationInfo& setPApplicationName( const char* pApplicationName_ )
5990 {
5991 pApplicationName = pApplicationName_;
5992 return *this;
5993 }
5994
5995 ApplicationInfo& setApplicationVersion( uint32_t applicationVersion_ )
5996 {
5997 applicationVersion = applicationVersion_;
5998 return *this;
5999 }
6000
6001 ApplicationInfo& setPEngineName( const char* pEngineName_ )
6002 {
6003 pEngineName = pEngineName_;
6004 return *this;
6005 }
6006
6007 ApplicationInfo& setEngineVersion( uint32_t engineVersion_ )
6008 {
6009 engineVersion = engineVersion_;
6010 return *this;
6011 }
6012
6013 ApplicationInfo& setApiVersion( uint32_t apiVersion_ )
6014 {
6015 apiVersion = apiVersion_;
6016 return *this;
6017 }
6018
6019 operator const VkApplicationInfo&() const
6020 {
6021 return *reinterpret_cast<const VkApplicationInfo*>(this);
6022 }
6023
6024 bool operator==( ApplicationInfo const& rhs ) const
6025 {
6026 return ( sType == rhs.sType )
6027 && ( pNext == rhs.pNext )
6028 && ( pApplicationName == rhs.pApplicationName )
6029 && ( applicationVersion == rhs.applicationVersion )
6030 && ( pEngineName == rhs.pEngineName )
6031 && ( engineVersion == rhs.engineVersion )
6032 && ( apiVersion == rhs.apiVersion );
6033 }
6034
6035 bool operator!=( ApplicationInfo const& rhs ) const
6036 {
6037 return !operator==( rhs );
6038 }
6039
6040 private:
6041 StructureType sType;
6042
6043 public:
6044 const void* pNext;
6045 const char* pApplicationName;
6046 uint32_t applicationVersion;
6047 const char* pEngineName;
6048 uint32_t engineVersion;
6049 uint32_t apiVersion;
6050 };
6051 static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
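  // Editorial usage sketch (not generated code): the wrapper constructor already
  // fills sType and pNext, so only the application fields need to be supplied.
  // The name and version literals are assumptions; VK_MAKE_VERSION and
  // VK_API_VERSION_1_0 come from <vulkan/vulkan.h>.
  //
  //   vk::ApplicationInfo appInfo( "triangle-demo",             // pApplicationName
  //                                VK_MAKE_VERSION( 1, 0, 0 ),  // applicationVersion
  //                                "no-engine",                 // pEngineName
  //                                VK_MAKE_VERSION( 1, 0, 0 ),  // engineVersion
  //                                VK_API_VERSION_1_0 );        // apiVersion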
6052
6053 struct DeviceQueueCreateInfo
6054 {
6055 DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = DeviceQueueCreateFlags(), uint32_t queueFamilyIndex_ = 0, uint32_t queueCount_ = 0, const float* pQueuePriorities_ = nullptr )
6056 : sType( StructureType::eDeviceQueueCreateInfo )
6057 , pNext( nullptr )
6058 , flags( flags_ )
6059 , queueFamilyIndex( queueFamilyIndex_ )
6060 , queueCount( queueCount_ )
6061 , pQueuePriorities( pQueuePriorities_ )
6062 {
6063 }
6064
6065 DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs )
6066 {
6067 memcpy( this, &rhs, sizeof(DeviceQueueCreateInfo) );
6068 }
6069
6070 DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs )
6071 {
6072 memcpy( this, &rhs, sizeof(DeviceQueueCreateInfo) );
6073 return *this;
6074 }
6075
6076 DeviceQueueCreateInfo& setPNext( const void* pNext_ )
6077 {
6078 pNext = pNext_;
6079 return *this;
6080 }
6081
6082 DeviceQueueCreateInfo& setFlags( DeviceQueueCreateFlags flags_ )
6083 {
6084 flags = flags_;
6085 return *this;
6086 }
6087
6088 DeviceQueueCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
6089 {
6090 queueFamilyIndex = queueFamilyIndex_;
6091 return *this;
6092 }
6093
6094 DeviceQueueCreateInfo& setQueueCount( uint32_t queueCount_ )
6095 {
6096 queueCount = queueCount_;
6097 return *this;
6098 }
6099
6100 DeviceQueueCreateInfo& setPQueuePriorities( const float* pQueuePriorities_ )
6101 {
6102 pQueuePriorities = pQueuePriorities_;
6103 return *this;
6104 }
6105
6106 operator const VkDeviceQueueCreateInfo&() const
6107 {
6108 return *reinterpret_cast<const VkDeviceQueueCreateInfo*>(this);
6109 }
6110
6111 bool operator==( DeviceQueueCreateInfo const& rhs ) const
6112 {
6113 return ( sType == rhs.sType )
6114 && ( pNext == rhs.pNext )
6115 && ( flags == rhs.flags )
6116 && ( queueFamilyIndex == rhs.queueFamilyIndex )
6117 && ( queueCount == rhs.queueCount )
6118 && ( pQueuePriorities == rhs.pQueuePriorities );
6119 }
6120
6121 bool operator!=( DeviceQueueCreateInfo const& rhs ) const
6122 {
6123 return !operator==( rhs );
6124 }
6125
6126 private:
6127 StructureType sType;
6128
6129 public:
6130 const void* pNext;
6131 DeviceQueueCreateFlags flags;
6132 uint32_t queueFamilyIndex;
6133 uint32_t queueCount;
6134 const float* pQueuePriorities;
6135 };
6136 static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
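  // Editorial usage sketch (not generated code): requesting a single queue from
  // an assumed graphicsQueueFamilyIndex. pQueuePriorities must point at
  // queueCount floats that stay alive until device creation.
  //
  //   float priority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueInfo = vk::DeviceQueueCreateInfo()
  //     .setQueueFamilyIndex( graphicsQueueFamilyIndex )
  //     .setQueueCount( 1 )
  //     .setPQueuePriorities( &priority );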
6137
6138 struct DeviceCreateInfo
6139 {
6140 DeviceCreateInfo( DeviceCreateFlags flags_ = DeviceCreateFlags(), uint32_t queueCreateInfoCount_ = 0, const DeviceQueueCreateInfo* pQueueCreateInfos_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr, const PhysicalDeviceFeatures* pEnabledFeatures_ = nullptr )
6141 : sType( StructureType::eDeviceCreateInfo )
6142 , pNext( nullptr )
6143 , flags( flags_ )
6144 , queueCreateInfoCount( queueCreateInfoCount_ )
6145 , pQueueCreateInfos( pQueueCreateInfos_ )
6146 , enabledLayerCount( enabledLayerCount_ )
6147 , ppEnabledLayerNames( ppEnabledLayerNames_ )
6148 , enabledExtensionCount( enabledExtensionCount_ )
6149 , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
6150 , pEnabledFeatures( pEnabledFeatures_ )
6151 {
6152 }
6153
6154 DeviceCreateInfo( VkDeviceCreateInfo const & rhs )
6155 {
6156 memcpy( this, &rhs, sizeof(DeviceCreateInfo) );
6157 }
6158
6159 DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs )
6160 {
6161 memcpy( this, &rhs, sizeof(DeviceCreateInfo) );
6162 return *this;
6163 }
6164
6165 DeviceCreateInfo& setPNext( const void* pNext_ )
6166 {
6167 pNext = pNext_;
6168 return *this;
6169 }
6170
6171 DeviceCreateInfo& setFlags( DeviceCreateFlags flags_ )
6172 {
6173 flags = flags_;
6174 return *this;
6175 }
6176
6177 DeviceCreateInfo& setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ )
6178 {
6179 queueCreateInfoCount = queueCreateInfoCount_;
6180 return *this;
6181 }
6182
6183 DeviceCreateInfo& setPQueueCreateInfos( const DeviceQueueCreateInfo* pQueueCreateInfos_ )
6184 {
6185 pQueueCreateInfos = pQueueCreateInfos_;
6186 return *this;
6187 }
6188
6189 DeviceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
6190 {
6191 enabledLayerCount = enabledLayerCount_;
6192 return *this;
6193 }
6194
6195 DeviceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
6196 {
6197 ppEnabledLayerNames = ppEnabledLayerNames_;
6198 return *this;
6199 }
6200
6201 DeviceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
6202 {
6203 enabledExtensionCount = enabledExtensionCount_;
6204 return *this;
6205 }
6206
6207 DeviceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
6208 {
6209 ppEnabledExtensionNames = ppEnabledExtensionNames_;
6210 return *this;
6211 }
6212
6213 DeviceCreateInfo& setPEnabledFeatures( const PhysicalDeviceFeatures* pEnabledFeatures_ )
6214 {
6215 pEnabledFeatures = pEnabledFeatures_;
6216 return *this;
6217 }
6218
6219 operator const VkDeviceCreateInfo&() const
6220 {
6221 return *reinterpret_cast<const VkDeviceCreateInfo*>(this);
6222 }
6223
6224 bool operator==( DeviceCreateInfo const& rhs ) const
6225 {
6226 return ( sType == rhs.sType )
6227 && ( pNext == rhs.pNext )
6228 && ( flags == rhs.flags )
6229 && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
6230 && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
6231 && ( enabledLayerCount == rhs.enabledLayerCount )
6232 && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
6233 && ( enabledExtensionCount == rhs.enabledExtensionCount )
6234 && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
6235 && ( pEnabledFeatures == rhs.pEnabledFeatures );
6236 }
6237
6238 bool operator!=( DeviceCreateInfo const& rhs ) const
6239 {
6240 return !operator==( rhs );
6241 }
6242
6243 private:
6244 StructureType sType;
6245
6246 public:
6247 const void* pNext;
6248 DeviceCreateFlags flags;
6249 uint32_t queueCreateInfoCount;
6250 const DeviceQueueCreateInfo* pQueueCreateInfos;
6251 uint32_t enabledLayerCount;
6252 const char* const* ppEnabledLayerNames;
6253 uint32_t enabledExtensionCount;
6254 const char* const* ppEnabledExtensionNames;
6255 const PhysicalDeviceFeatures* pEnabledFeatures;
6256 };
6257 static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
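  // Editorial usage sketch (not generated code): wiring the queue request from
  // the sketch above into a device. The implicit conversion operator lets the
  // wrapper be handed straight to the C entry point; physicalDevice and
  // queueInfo are assumed to exist already.
  //
  //   vk::DeviceCreateInfo deviceInfo = vk::DeviceCreateInfo()
  //     .setQueueCreateInfoCount( 1 )
  //     .setPQueueCreateInfos( &queueInfo );
  //   const VkDeviceCreateInfo& rawDeviceInfo = deviceInfo;
  //   VkDevice device = VK_NULL_HANDLE;
  //   vkCreateDevice( physicalDevice, &rawDeviceInfo, nullptr, &device );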
6258
6259 struct InstanceCreateInfo
6260 {
6261 InstanceCreateInfo( InstanceCreateFlags flags_ = InstanceCreateFlags(), const ApplicationInfo* pApplicationInfo_ = nullptr, uint32_t enabledLayerCount_ = 0, const char* const* ppEnabledLayerNames_ = nullptr, uint32_t enabledExtensionCount_ = 0, const char* const* ppEnabledExtensionNames_ = nullptr )
6262 : sType( StructureType::eInstanceCreateInfo )
6263 , pNext( nullptr )
6264 , flags( flags_ )
6265 , pApplicationInfo( pApplicationInfo_ )
6266 , enabledLayerCount( enabledLayerCount_ )
6267 , ppEnabledLayerNames( ppEnabledLayerNames_ )
6268 , enabledExtensionCount( enabledExtensionCount_ )
6269 , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
6270 {
6271 }
6272
6273 InstanceCreateInfo( VkInstanceCreateInfo const & rhs )
6274 {
6275 memcpy( this, &rhs, sizeof(InstanceCreateInfo) );
6276 }
6277
6278 InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs )
6279 {
6280 memcpy( this, &rhs, sizeof(InstanceCreateInfo) );
6281 return *this;
6282 }
6283
6284 InstanceCreateInfo& setPNext( const void* pNext_ )
6285 {
6286 pNext = pNext_;
6287 return *this;
6288 }
6289
6290 InstanceCreateInfo& setFlags( InstanceCreateFlags flags_ )
6291 {
6292 flags = flags_;
6293 return *this;
6294 }
6295
6296 InstanceCreateInfo& setPApplicationInfo( const ApplicationInfo* pApplicationInfo_ )
6297 {
6298 pApplicationInfo = pApplicationInfo_;
6299 return *this;
6300 }
6301
6302 InstanceCreateInfo& setEnabledLayerCount( uint32_t enabledLayerCount_ )
6303 {
6304 enabledLayerCount = enabledLayerCount_;
6305 return *this;
6306 }
6307
6308 InstanceCreateInfo& setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ )
6309 {
6310 ppEnabledLayerNames = ppEnabledLayerNames_;
6311 return *this;
6312 }
6313
6314 InstanceCreateInfo& setEnabledExtensionCount( uint32_t enabledExtensionCount_ )
6315 {
6316 enabledExtensionCount = enabledExtensionCount_;
6317 return *this;
6318 }
6319
6320 InstanceCreateInfo& setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ )
6321 {
6322 ppEnabledExtensionNames = ppEnabledExtensionNames_;
6323 return *this;
6324 }
6325
6326 operator const VkInstanceCreateInfo&() const
6327 {
6328 return *reinterpret_cast<const VkInstanceCreateInfo*>(this);
6329 }
6330
6331 bool operator==( InstanceCreateInfo const& rhs ) const
6332 {
6333 return ( sType == rhs.sType )
6334 && ( pNext == rhs.pNext )
6335 && ( flags == rhs.flags )
6336 && ( pApplicationInfo == rhs.pApplicationInfo )
6337 && ( enabledLayerCount == rhs.enabledLayerCount )
6338 && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
6339 && ( enabledExtensionCount == rhs.enabledExtensionCount )
6340 && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
6341 }
6342
6343 bool operator!=( InstanceCreateInfo const& rhs ) const
6344 {
6345 return !operator==( rhs );
6346 }
6347
6348 private:
6349 StructureType sType;
6350
6351 public:
6352 const void* pNext;
6353 InstanceCreateFlags flags;
6354 const ApplicationInfo* pApplicationInfo;
6355 uint32_t enabledLayerCount;
6356 const char* const* ppEnabledLayerNames;
6357 uint32_t enabledExtensionCount;
6358 const char* const* ppEnabledExtensionNames;
6359 };
6360 static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
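  // Editorial usage sketch (not generated code): an InstanceCreateInfo pointing
  // at the ApplicationInfo from the sketch above, passed through the implicit
  // conversion to the C API. The extension list is assumed for illustration.
  //
  //   const char* extensions[] = { VK_KHR_SURFACE_EXTENSION_NAME };
  //   vk::InstanceCreateInfo instanceInfo = vk::InstanceCreateInfo()
  //     .setPApplicationInfo( &appInfo )
  //     .setEnabledExtensionCount( 1 )
  //     .setPpEnabledExtensionNames( extensions );
  //   const VkInstanceCreateInfo& rawInstanceInfo = instanceInfo;
  //   VkInstance instance = VK_NULL_HANDLE;
  //   vkCreateInstance( &rawInstanceInfo, nullptr, &instance );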
6361
6362 struct MemoryAllocateInfo
6363 {
6364 MemoryAllocateInfo( DeviceSize allocationSize_ = 0, uint32_t memoryTypeIndex_ = 0 )
6365 : sType( StructureType::eMemoryAllocateInfo )
6366 , pNext( nullptr )
6367 , allocationSize( allocationSize_ )
6368 , memoryTypeIndex( memoryTypeIndex_ )
6369 {
6370 }
6371
6372 MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs )
6373 {
6374 memcpy( this, &rhs, sizeof(MemoryAllocateInfo) );
6375 }
6376
6377 MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs )
6378 {
6379 memcpy( this, &rhs, sizeof(MemoryAllocateInfo) );
6380 return *this;
6381 }
6382
6383 MemoryAllocateInfo& setPNext( const void* pNext_ )
6384 {
6385 pNext = pNext_;
6386 return *this;
6387 }
6388
6389 MemoryAllocateInfo& setAllocationSize( DeviceSize allocationSize_ )
6390 {
6391 allocationSize = allocationSize_;
6392 return *this;
6393 }
6394
6395 MemoryAllocateInfo& setMemoryTypeIndex( uint32_t memoryTypeIndex_ )
6396 {
6397 memoryTypeIndex = memoryTypeIndex_;
6398 return *this;
6399 }
6400
6401 operator const VkMemoryAllocateInfo&() const
6402 {
6403 return *reinterpret_cast<const VkMemoryAllocateInfo*>(this);
6404 }
6405
6406 bool operator==( MemoryAllocateInfo const& rhs ) const
6407 {
6408 return ( sType == rhs.sType )
6409 && ( pNext == rhs.pNext )
6410 && ( allocationSize == rhs.allocationSize )
6411 && ( memoryTypeIndex == rhs.memoryTypeIndex );
6412 }
6413
6414 bool operator!=( MemoryAllocateInfo const& rhs ) const
6415 {
6416 return !operator==( rhs );
6417 }
6418
6419 private:
6420 StructureType sType;
6421
6422 public:
6423 const void* pNext;
6424 DeviceSize allocationSize;
6425 uint32_t memoryTypeIndex;
6426 };
6427 static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
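  // Editorial usage sketch (not generated code): allocationSize and
  // memoryTypeIndex would normally come from vkGetBufferMemoryRequirements and a
  // memory-type search; the values here are placeholders only.
  //
  //   vk::MemoryAllocateInfo allocInfo( /*allocationSize*/ 64 * 1024,
  //                                     /*memoryTypeIndex*/ 0 );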
6428
6429 struct MappedMemoryRange
6430 {
6431 MappedMemoryRange( DeviceMemory memory_ = DeviceMemory(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
6432 : sType( StructureType::eMappedMemoryRange )
6433 , pNext( nullptr )
6434 , memory( memory_ )
6435 , offset( offset_ )
6436 , size( size_ )
6437 {
6438 }
6439
6440 MappedMemoryRange( VkMappedMemoryRange const & rhs )
6441 {
6442 memcpy( this, &rhs, sizeof(MappedMemoryRange) );
6443 }
6444
6445 MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs )
6446 {
6447 memcpy( this, &rhs, sizeof(MappedMemoryRange) );
6448 return *this;
6449 }
6450
6451 MappedMemoryRange& setPNext( const void* pNext_ )
6452 {
6453 pNext = pNext_;
6454 return *this;
6455 }
6456
6457 MappedMemoryRange& setMemory( DeviceMemory memory_ )
6458 {
6459 memory = memory_;
6460 return *this;
6461 }
6462
6463 MappedMemoryRange& setOffset( DeviceSize offset_ )
6464 {
6465 offset = offset_;
6466 return *this;
6467 }
6468
6469 MappedMemoryRange& setSize( DeviceSize size_ )
6470 {
6471 size = size_;
6472 return *this;
6473 }
6474
6475 operator const VkMappedMemoryRange&() const
6476 {
6477 return *reinterpret_cast<const VkMappedMemoryRange*>(this);
6478 }
6479
6480 bool operator==( MappedMemoryRange const& rhs ) const
6481 {
6482 return ( sType == rhs.sType )
6483 && ( pNext == rhs.pNext )
6484 && ( memory == rhs.memory )
6485 && ( offset == rhs.offset )
6486 && ( size == rhs.size );
6487 }
6488
6489 bool operator!=( MappedMemoryRange const& rhs ) const
6490 {
6491 return !operator==( rhs );
6492 }
6493
6494 private:
6495 StructureType sType;
6496
6497 public:
6498 const void* pNext;
6499 DeviceMemory memory;
6500 DeviceSize offset;
6501 DeviceSize size;
6502 };
6503 static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
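  // Editorial usage sketch (not generated code): flushing a whole non-coherent
  // mapped allocation. device and memory are assumed handles; VK_WHOLE_SIZE is
  // the usual sentinel from <vulkan/vulkan.h>.
  //
  //   vk::MappedMemoryRange range = vk::MappedMemoryRange()
  //     .setMemory( memory )
  //     .setOffset( 0 )
  //     .setSize( VK_WHOLE_SIZE );
  //   const VkMappedMemoryRange& rawRange = range;
  //   vkFlushMappedMemoryRanges( device, 1, &rawRange );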
6504
6505 struct WriteDescriptorSet
6506 {
6507 WriteDescriptorSet( DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, const DescriptorImageInfo* pImageInfo_ = nullptr, const DescriptorBufferInfo* pBufferInfo_ = nullptr, const BufferView* pTexelBufferView_ = nullptr )
6508 : sType( StructureType::eWriteDescriptorSet )
6509 , pNext( nullptr )
6510 , dstSet( dstSet_ )
6511 , dstBinding( dstBinding_ )
6512 , dstArrayElement( dstArrayElement_ )
6513 , descriptorCount( descriptorCount_ )
6514 , descriptorType( descriptorType_ )
6515 , pImageInfo( pImageInfo_ )
6516 , pBufferInfo( pBufferInfo_ )
6517 , pTexelBufferView( pTexelBufferView_ )
6518 {
6519 }
6520
6521 WriteDescriptorSet( VkWriteDescriptorSet const & rhs )
6522 {
6523 memcpy( this, &rhs, sizeof(WriteDescriptorSet) );
6524 }
6525
6526 WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs )
6527 {
6528 memcpy( this, &rhs, sizeof(WriteDescriptorSet) );
6529 return *this;
6530 }
6531
6532 WriteDescriptorSet& setPNext( const void* pNext_ )
6533 {
6534 pNext = pNext_;
6535 return *this;
6536 }
6537
6538 WriteDescriptorSet& setDstSet( DescriptorSet dstSet_ )
6539 {
6540 dstSet = dstSet_;
6541 return *this;
6542 }
6543
6544 WriteDescriptorSet& setDstBinding( uint32_t dstBinding_ )
6545 {
6546 dstBinding = dstBinding_;
6547 return *this;
6548 }
6549
6550 WriteDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
6551 {
6552 dstArrayElement = dstArrayElement_;
6553 return *this;
6554 }
6555
6556 WriteDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
6557 {
6558 descriptorCount = descriptorCount_;
6559 return *this;
6560 }
6561
6562 WriteDescriptorSet& setDescriptorType( DescriptorType descriptorType_ )
6563 {
6564 descriptorType = descriptorType_;
6565 return *this;
6566 }
6567
6568 WriteDescriptorSet& setPImageInfo( const DescriptorImageInfo* pImageInfo_ )
6569 {
6570 pImageInfo = pImageInfo_;
6571 return *this;
6572 }
6573
6574 WriteDescriptorSet& setPBufferInfo( const DescriptorBufferInfo* pBufferInfo_ )
6575 {
6576 pBufferInfo = pBufferInfo_;
6577 return *this;
6578 }
6579
6580 WriteDescriptorSet& setPTexelBufferView( const BufferView* pTexelBufferView_ )
6581 {
6582 pTexelBufferView = pTexelBufferView_;
6583 return *this;
6584 }
6585
6586 operator const VkWriteDescriptorSet&() const
6587 {
6588 return *reinterpret_cast<const VkWriteDescriptorSet*>(this);
6589 }
6590
6591 bool operator==( WriteDescriptorSet const& rhs ) const
6592 {
6593 return ( sType == rhs.sType )
6594 && ( pNext == rhs.pNext )
6595 && ( dstSet == rhs.dstSet )
6596 && ( dstBinding == rhs.dstBinding )
6597 && ( dstArrayElement == rhs.dstArrayElement )
6598 && ( descriptorCount == rhs.descriptorCount )
6599 && ( descriptorType == rhs.descriptorType )
6600 && ( pImageInfo == rhs.pImageInfo )
6601 && ( pBufferInfo == rhs.pBufferInfo )
6602 && ( pTexelBufferView == rhs.pTexelBufferView );
6603 }
6604
6605 bool operator!=( WriteDescriptorSet const& rhs ) const
6606 {
6607 return !operator==( rhs );
6608 }
6609
6610 private:
6611 StructureType sType;
6612
6613 public:
6614 const void* pNext;
6615 DescriptorSet dstSet;
6616 uint32_t dstBinding;
6617 uint32_t dstArrayElement;
6618 uint32_t descriptorCount;
6619 DescriptorType descriptorType;
6620 const DescriptorImageInfo* pImageInfo;
6621 const DescriptorBufferInfo* pBufferInfo;
6622 const BufferView* pTexelBufferView;
6623 };
6624 static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
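  // Editorial usage sketch (not generated code): writing one uniform-buffer
  // descriptor into an already allocated set. DescriptorBufferInfo is declared
  // earlier in this header; descriptorSet, buffer and device are assumed handles.
  //
  //   vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
  //   vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
  //     .setDstSet( descriptorSet )
  //     .setDstBinding( 0 )
  //     .setDescriptorCount( 1 )
  //     .setDescriptorType( vk::DescriptorType::eUniformBuffer )
  //     .setPBufferInfo( &bufferInfo );
  //   const VkWriteDescriptorSet& rawWrite = write;
  //   vkUpdateDescriptorSets( device, 1, &rawWrite, 0, nullptr );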
6625
6626 struct CopyDescriptorSet
6627 {
6628 CopyDescriptorSet( DescriptorSet srcSet_ = DescriptorSet(), uint32_t srcBinding_ = 0, uint32_t srcArrayElement_ = 0, DescriptorSet dstSet_ = DescriptorSet(), uint32_t dstBinding_ = 0, uint32_t dstArrayElement_ = 0, uint32_t descriptorCount_ = 0 )
6629 : sType( StructureType::eCopyDescriptorSet )
6630 , pNext( nullptr )
6631 , srcSet( srcSet_ )
6632 , srcBinding( srcBinding_ )
6633 , srcArrayElement( srcArrayElement_ )
6634 , dstSet( dstSet_ )
6635 , dstBinding( dstBinding_ )
6636 , dstArrayElement( dstArrayElement_ )
6637 , descriptorCount( descriptorCount_ )
6638 {
6639 }
6640
6641 CopyDescriptorSet( VkCopyDescriptorSet const & rhs )
6642 {
6643 memcpy( this, &rhs, sizeof(CopyDescriptorSet) );
6644 }
6645
6646 CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs )
6647 {
6648 memcpy( this, &rhs, sizeof(CopyDescriptorSet) );
6649 return *this;
6650 }
6651
6652 CopyDescriptorSet& setPNext( const void* pNext_ )
6653 {
6654 pNext = pNext_;
6655 return *this;
6656 }
6657
6658 CopyDescriptorSet& setSrcSet( DescriptorSet srcSet_ )
6659 {
6660 srcSet = srcSet_;
6661 return *this;
6662 }
6663
6664 CopyDescriptorSet& setSrcBinding( uint32_t srcBinding_ )
6665 {
6666 srcBinding = srcBinding_;
6667 return *this;
6668 }
6669
6670 CopyDescriptorSet& setSrcArrayElement( uint32_t srcArrayElement_ )
6671 {
6672 srcArrayElement = srcArrayElement_;
6673 return *this;
6674 }
6675
6676 CopyDescriptorSet& setDstSet( DescriptorSet dstSet_ )
6677 {
6678 dstSet = dstSet_;
6679 return *this;
6680 }
6681
6682 CopyDescriptorSet& setDstBinding( uint32_t dstBinding_ )
6683 {
6684 dstBinding = dstBinding_;
6685 return *this;
6686 }
6687
6688 CopyDescriptorSet& setDstArrayElement( uint32_t dstArrayElement_ )
6689 {
6690 dstArrayElement = dstArrayElement_;
6691 return *this;
6692 }
6693
6694 CopyDescriptorSet& setDescriptorCount( uint32_t descriptorCount_ )
6695 {
6696 descriptorCount = descriptorCount_;
6697 return *this;
6698 }
6699
6700 operator const VkCopyDescriptorSet&() const
6701 {
6702 return *reinterpret_cast<const VkCopyDescriptorSet*>(this);
6703 }
6704
6705 bool operator==( CopyDescriptorSet const& rhs ) const
6706 {
6707 return ( sType == rhs.sType )
6708 && ( pNext == rhs.pNext )
6709 && ( srcSet == rhs.srcSet )
6710 && ( srcBinding == rhs.srcBinding )
6711 && ( srcArrayElement == rhs.srcArrayElement )
6712 && ( dstSet == rhs.dstSet )
6713 && ( dstBinding == rhs.dstBinding )
6714 && ( dstArrayElement == rhs.dstArrayElement )
6715 && ( descriptorCount == rhs.descriptorCount );
6716 }
6717
6718 bool operator!=( CopyDescriptorSet const& rhs ) const
6719 {
6720 return !operator==( rhs );
6721 }
6722
6723 private:
6724 StructureType sType;
6725
6726 public:
6727 const void* pNext;
6728 DescriptorSet srcSet;
6729 uint32_t srcBinding;
6730 uint32_t srcArrayElement;
6731 DescriptorSet dstSet;
6732 uint32_t dstBinding;
6733 uint32_t dstArrayElement;
6734 uint32_t descriptorCount;
6735 };
6736 static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
6737
6738 struct BufferViewCreateInfo
6739 {
6740 BufferViewCreateInfo( BufferViewCreateFlags flags_ = BufferViewCreateFlags(), Buffer buffer_ = Buffer(), Format format_ = Format::eUndefined, DeviceSize offset_ = 0, DeviceSize range_ = 0 )
6741 : sType( StructureType::eBufferViewCreateInfo )
6742 , pNext( nullptr )
6743 , flags( flags_ )
6744 , buffer( buffer_ )
6745 , format( format_ )
6746 , offset( offset_ )
6747 , range( range_ )
6748 {
6749 }
6750
6751 BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs )
6752 {
6753 memcpy( this, &rhs, sizeof(BufferViewCreateInfo) );
6754 }
6755
6756 BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs )
6757 {
6758 memcpy( this, &rhs, sizeof(BufferViewCreateInfo) );
6759 return *this;
6760 }
6761
6762 BufferViewCreateInfo& setPNext( const void* pNext_ )
6763 {
6764 pNext = pNext_;
6765 return *this;
6766 }
6767
6768 BufferViewCreateInfo& setFlags( BufferViewCreateFlags flags_ )
6769 {
6770 flags = flags_;
6771 return *this;
6772 }
6773
6774 BufferViewCreateInfo& setBuffer( Buffer buffer_ )
6775 {
6776 buffer = buffer_;
6777 return *this;
6778 }
6779
6780 BufferViewCreateInfo& setFormat( Format format_ )
6781 {
6782 format = format_;
6783 return *this;
6784 }
6785
6786 BufferViewCreateInfo& setOffset( DeviceSize offset_ )
6787 {
6788 offset = offset_;
6789 return *this;
6790 }
6791
6792 BufferViewCreateInfo& setRange( DeviceSize range_ )
6793 {
6794 range = range_;
6795 return *this;
6796 }
6797
6798 operator const VkBufferViewCreateInfo&() const
6799 {
6800 return *reinterpret_cast<const VkBufferViewCreateInfo*>(this);
6801 }
6802
6803 bool operator==( BufferViewCreateInfo const& rhs ) const
6804 {
6805 return ( sType == rhs.sType )
6806 && ( pNext == rhs.pNext )
6807 && ( flags == rhs.flags )
6808 && ( buffer == rhs.buffer )
6809 && ( format == rhs.format )
6810 && ( offset == rhs.offset )
6811 && ( range == rhs.range );
6812 }
6813
6814 bool operator!=( BufferViewCreateInfo const& rhs ) const
6815 {
6816 return !operator==( rhs );
6817 }
6818
6819 private:
6820 StructureType sType;
6821
6822 public:
6823 const void* pNext;
6824 BufferViewCreateFlags flags;
6825 Buffer buffer;
6826 Format format;
6827 DeviceSize offset;
6828 DeviceSize range;
6829 };
6830 static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
6831
6832 struct ShaderModuleCreateInfo
6833 {
6834 ShaderModuleCreateInfo( ShaderModuleCreateFlags flags_ = ShaderModuleCreateFlags(), size_t codeSize_ = 0, const uint32_t* pCode_ = nullptr )
6835 : sType( StructureType::eShaderModuleCreateInfo )
6836 , pNext( nullptr )
6837 , flags( flags_ )
6838 , codeSize( codeSize_ )
6839 , pCode( pCode_ )
6840 {
6841 }
6842
6843 ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs )
6844 {
6845 memcpy( this, &rhs, sizeof(ShaderModuleCreateInfo) );
6846 }
6847
6848 ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs )
6849 {
6850 memcpy( this, &rhs, sizeof(ShaderModuleCreateInfo) );
6851 return *this;
6852 }
6853
6854 ShaderModuleCreateInfo& setPNext( const void* pNext_ )
6855 {
6856 pNext = pNext_;
6857 return *this;
6858 }
6859
6860 ShaderModuleCreateInfo& setFlags( ShaderModuleCreateFlags flags_ )
6861 {
6862 flags = flags_;
6863 return *this;
6864 }
6865
6866 ShaderModuleCreateInfo& setCodeSize( size_t codeSize_ )
6867 {
6868 codeSize = codeSize_;
6869 return *this;
6870 }
6871
6872 ShaderModuleCreateInfo& setPCode( const uint32_t* pCode_ )
6873 {
6874 pCode = pCode_;
6875 return *this;
6876 }
6877
6878 operator const VkShaderModuleCreateInfo&() const
6879 {
6880 return *reinterpret_cast<const VkShaderModuleCreateInfo*>(this);
6881 }
6882
6883 bool operator==( ShaderModuleCreateInfo const& rhs ) const
6884 {
6885 return ( sType == rhs.sType )
6886 && ( pNext == rhs.pNext )
6887 && ( flags == rhs.flags )
6888 && ( codeSize == rhs.codeSize )
6889 && ( pCode == rhs.pCode );
6890 }
6891
6892 bool operator!=( ShaderModuleCreateInfo const& rhs ) const
6893 {
6894 return !operator==( rhs );
6895 }
6896
6897 private:
6898 StructureType sType;
6899
6900 public:
6901 const void* pNext;
6902 ShaderModuleCreateFlags flags;
6903 size_t codeSize;
6904 const uint32_t* pCode;
6905 };
6906 static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
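  // Editorial usage sketch (not generated code): codeSize is a byte count even
  // though pCode is a uint32_t pointer, so it is the word count times
  // sizeof(uint32_t). spirv is an assumed std::vector<uint32_t> of SPIR-V words.
  //
  //   vk::ShaderModuleCreateInfo moduleInfo = vk::ShaderModuleCreateInfo()
  //     .setCodeSize( spirv.size() * sizeof( uint32_t ) )
  //     .setPCode( spirv.data() );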
6907
6908 struct DescriptorSetAllocateInfo
6909 {
6910 DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = DescriptorPool(), uint32_t descriptorSetCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr )
6911 : sType( StructureType::eDescriptorSetAllocateInfo )
6912 , pNext( nullptr )
6913 , descriptorPool( descriptorPool_ )
6914 , descriptorSetCount( descriptorSetCount_ )
6915 , pSetLayouts( pSetLayouts_ )
6916 {
6917 }
6918
6919 DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs )
6920 {
6921 memcpy( this, &rhs, sizeof(DescriptorSetAllocateInfo) );
6922 }
6923
6924 DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs )
6925 {
6926 memcpy( this, &rhs, sizeof(DescriptorSetAllocateInfo) );
6927 return *this;
6928 }
6929
6930 DescriptorSetAllocateInfo& setPNext( const void* pNext_ )
6931 {
6932 pNext = pNext_;
6933 return *this;
6934 }
6935
6936 DescriptorSetAllocateInfo& setDescriptorPool( DescriptorPool descriptorPool_ )
6937 {
6938 descriptorPool = descriptorPool_;
6939 return *this;
6940 }
6941
6942 DescriptorSetAllocateInfo& setDescriptorSetCount( uint32_t descriptorSetCount_ )
6943 {
6944 descriptorSetCount = descriptorSetCount_;
6945 return *this;
6946 }
6947
6948 DescriptorSetAllocateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
6949 {
6950 pSetLayouts = pSetLayouts_;
6951 return *this;
6952 }
6953
6954 operator const VkDescriptorSetAllocateInfo&() const
6955 {
6956 return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>(this);
6957 }
6958
6959 bool operator==( DescriptorSetAllocateInfo const& rhs ) const
6960 {
6961 return ( sType == rhs.sType )
6962 && ( pNext == rhs.pNext )
6963 && ( descriptorPool == rhs.descriptorPool )
6964 && ( descriptorSetCount == rhs.descriptorSetCount )
6965 && ( pSetLayouts == rhs.pSetLayouts );
6966 }
6967
6968 bool operator!=( DescriptorSetAllocateInfo const& rhs ) const
6969 {
6970 return !operator==( rhs );
6971 }
6972
6973 private:
6974 StructureType sType;
6975
6976 public:
6977 const void* pNext;
6978 DescriptorPool descriptorPool;
6979 uint32_t descriptorSetCount;
6980 const DescriptorSetLayout* pSetLayouts;
6981 };
6982 static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
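  // Editorial usage sketch (not generated code): allocating one set from a pool
  // with a layout created earlier; descriptorPool and setLayout are assumed
  // handles.
  //
  //   vk::DescriptorSetAllocateInfo allocateInfo( descriptorPool, 1, &setLayout );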
6983
6984 struct PipelineVertexInputStateCreateInfo
6985 {
6986 PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = PipelineVertexInputStateCreateFlags(), uint32_t vertexBindingDescriptionCount_ = 0, const VertexInputBindingDescription* pVertexBindingDescriptions_ = nullptr, uint32_t vertexAttributeDescriptionCount_ = 0, const VertexInputAttributeDescription* pVertexAttributeDescriptions_ = nullptr )
6987 : sType( StructureType::ePipelineVertexInputStateCreateInfo )
6988 , pNext( nullptr )
6989 , flags( flags_ )
6990 , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
6991 , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
6992 , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
6993 , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
6994 {
6995 }
6996
6997 PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs )
6998 {
6999 memcpy( this, &rhs, sizeof(PipelineVertexInputStateCreateInfo) );
7000 }
7001
7002 PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs )
7003 {
7004 memcpy( this, &rhs, sizeof(PipelineVertexInputStateCreateInfo) );
7005 return *this;
7006 }
7007
7008 PipelineVertexInputStateCreateInfo& setPNext( const void* pNext_ )
7009 {
7010 pNext = pNext_;
7011 return *this;
7012 }
7013
7014 PipelineVertexInputStateCreateInfo& setFlags( PipelineVertexInputStateCreateFlags flags_ )
7015 {
7016 flags = flags_;
7017 return *this;
7018 }
7019
7020 PipelineVertexInputStateCreateInfo& setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ )
7021 {
7022 vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
7023 return *this;
7024 }
7025
7026 PipelineVertexInputStateCreateInfo& setPVertexBindingDescriptions( const VertexInputBindingDescription* pVertexBindingDescriptions_ )
7027 {
7028 pVertexBindingDescriptions = pVertexBindingDescriptions_;
7029 return *this;
7030 }
7031
7032 PipelineVertexInputStateCreateInfo& setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ )
7033 {
7034 vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
7035 return *this;
7036 }
7037
7038 PipelineVertexInputStateCreateInfo& setPVertexAttributeDescriptions( const VertexInputAttributeDescription* pVertexAttributeDescriptions_ )
7039 {
7040 pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
7041 return *this;
7042 }
7043
7044 operator const VkPipelineVertexInputStateCreateInfo&() const
7045 {
7046 return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>(this);
7047 }
7048
7049 bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const
7050 {
7051 return ( sType == rhs.sType )
7052 && ( pNext == rhs.pNext )
7053 && ( flags == rhs.flags )
7054 && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
7055 && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
7056 && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
7057 && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
7058 }
7059
7060 bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const
7061 {
7062 return !operator==( rhs );
7063 }
7064
7065 private:
7066 StructureType sType;
7067
7068 public:
7069 const void* pNext;
7070 PipelineVertexInputStateCreateFlags flags;
7071 uint32_t vertexBindingDescriptionCount;
7072 const VertexInputBindingDescription* pVertexBindingDescriptions;
7073 uint32_t vertexAttributeDescriptionCount;
7074 const VertexInputAttributeDescription* pVertexAttributeDescriptions;
7075 };
7076 static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
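  // Editorial usage sketch (not generated code): tying the binding and attribute
  // descriptions from the earlier sketches into the vertex-input stage of a
  // graphics pipeline.
  //
  //   vk::PipelineVertexInputStateCreateInfo vertexInput = vk::PipelineVertexInputStateCreateInfo()
  //     .setVertexBindingDescriptionCount( 1 )
  //     .setPVertexBindingDescriptions( &binding )
  //     .setVertexAttributeDescriptionCount( 1 )
  //     .setPVertexAttributeDescriptions( &positionAttribute );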
7077
7078 struct PipelineInputAssemblyStateCreateInfo
7079 {
7080 PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = PipelineInputAssemblyStateCreateFlags(), PrimitiveTopology topology_ = PrimitiveTopology::ePointList, Bool32 primitiveRestartEnable_ = 0 )
7081 : sType( StructureType::ePipelineInputAssemblyStateCreateInfo )
7082 , pNext( nullptr )
7083 , flags( flags_ )
7084 , topology( topology_ )
7085 , primitiveRestartEnable( primitiveRestartEnable_ )
7086 {
7087 }
7088
7089 PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs )
7090 {
7091 memcpy( this, &rhs, sizeof(PipelineInputAssemblyStateCreateInfo) );
7092 }
7093
7094 PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs )
7095 {
7096 memcpy( this, &rhs, sizeof(PipelineInputAssemblyStateCreateInfo) );
7097 return *this;
7098 }
7099
7100 PipelineInputAssemblyStateCreateInfo& setPNext( const void* pNext_ )
7101 {
7102 pNext = pNext_;
7103 return *this;
7104 }
7105
7106 PipelineInputAssemblyStateCreateInfo& setFlags( PipelineInputAssemblyStateCreateFlags flags_ )
7107 {
7108 flags = flags_;
7109 return *this;
7110 }
7111
7112 PipelineInputAssemblyStateCreateInfo& setTopology( PrimitiveTopology topology_ )
7113 {
7114 topology = topology_;
7115 return *this;
7116 }
7117
7118 PipelineInputAssemblyStateCreateInfo& setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ )
7119 {
7120 primitiveRestartEnable = primitiveRestartEnable_;
7121 return *this;
7122 }
7123
7124 operator const VkPipelineInputAssemblyStateCreateInfo&() const
7125 {
7126 return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>(this);
7127 }
7128
7129 bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const
7130 {
7131 return ( sType == rhs.sType )
7132 && ( pNext == rhs.pNext )
7133 && ( flags == rhs.flags )
7134 && ( topology == rhs.topology )
7135 && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
7136 }
7137
7138 bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const
7139 {
7140 return !operator==( rhs );
7141 }
7142
7143 private:
7144 StructureType sType;
7145
7146 public:
7147 const void* pNext;
7148 PipelineInputAssemblyStateCreateFlags flags;
7149 PrimitiveTopology topology;
7150 Bool32 primitiveRestartEnable;
7151 };
7152 static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
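  // Editorial usage sketch (not generated code): a plain triangle list with
  // primitive restart disabled, the common case for indexed meshes, using the
  // constructor above.
  //
  //   vk::PipelineInputAssemblyStateCreateInfo inputAssembly(
  //     vk::PipelineInputAssemblyStateCreateFlags(),
  //     vk::PrimitiveTopology::eTriangleList,
  //     VK_FALSE );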
7153
7154 struct PipelineTessellationStateCreateInfo
7155 {
7156 PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = PipelineTessellationStateCreateFlags(), uint32_t patchControlPoints_ = 0 )
7157 : sType( StructureType::ePipelineTessellationStateCreateInfo )
7158 , pNext( nullptr )
7159 , flags( flags_ )
7160 , patchControlPoints( patchControlPoints_ )
7161 {
7162 }
7163
7164 PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs )
7165 {
7166 memcpy( this, &rhs, sizeof(PipelineTessellationStateCreateInfo) );
7167 }
7168
7169 PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs )
7170 {
7171 memcpy( this, &rhs, sizeof(PipelineTessellationStateCreateInfo) );
7172 return *this;
7173 }
7174
7175 PipelineTessellationStateCreateInfo& setPNext( const void* pNext_ )
7176 {
7177 pNext = pNext_;
7178 return *this;
7179 }
7180
7181 PipelineTessellationStateCreateInfo& setFlags( PipelineTessellationStateCreateFlags flags_ )
7182 {
7183 flags = flags_;
7184 return *this;
7185 }
7186
7187 PipelineTessellationStateCreateInfo& setPatchControlPoints( uint32_t patchControlPoints_ )
7188 {
7189 patchControlPoints = patchControlPoints_;
7190 return *this;
7191 }
7192
7193 operator const VkPipelineTessellationStateCreateInfo&() const
7194 {
7195 return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>(this);
7196 }
7197
7198 bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const
7199 {
7200 return ( sType == rhs.sType )
7201 && ( pNext == rhs.pNext )
7202 && ( flags == rhs.flags )
7203 && ( patchControlPoints == rhs.patchControlPoints );
7204 }
7205
7206 bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const
7207 {
7208 return !operator==( rhs );
7209 }
7210
7211 private:
7212 StructureType sType;
7213
7214 public:
7215 const void* pNext;
7216 PipelineTessellationStateCreateFlags flags;
7217 uint32_t patchControlPoints;
7218 };
7219 static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
7220
7221 struct PipelineViewportStateCreateInfo
7222 {
7223 PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = PipelineViewportStateCreateFlags(), uint32_t viewportCount_ = 0, const Viewport* pViewports_ = nullptr, uint32_t scissorCount_ = 0, const Rect2D* pScissors_ = nullptr )
7224 : sType( StructureType::ePipelineViewportStateCreateInfo )
7225 , pNext( nullptr )
7226 , flags( flags_ )
7227 , viewportCount( viewportCount_ )
7228 , pViewports( pViewports_ )
7229 , scissorCount( scissorCount_ )
7230 , pScissors( pScissors_ )
7231 {
7232 }
7233
7234 PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs )
7235 {
7236 memcpy( this, &rhs, sizeof(PipelineViewportStateCreateInfo) );
7237 }
7238
7239 PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs )
7240 {
7241 memcpy( this, &rhs, sizeof(PipelineViewportStateCreateInfo) );
7242 return *this;
7243 }
7244
7245    PipelineViewportStateCreateInfo& setPNext( const void* pNext_ )
7246 {
7247 pNext = pNext_;
7248 return *this;
7249 }
7250
7251 PipelineViewportStateCreateInfo& setFlags( PipelineViewportStateCreateFlags flags_ )
7252 {
7253 flags = flags_;
7254 return *this;
7255 }
7256
7257 PipelineViewportStateCreateInfo& setViewportCount( uint32_t viewportCount_ )
7258 {
7259 viewportCount = viewportCount_;
7260 return *this;
7261 }
7262
7263 PipelineViewportStateCreateInfo& setPViewports( const Viewport* pViewports_ )
7264 {
7265 pViewports = pViewports_;
7266 return *this;
7267 }
7268
7269 PipelineViewportStateCreateInfo& setScissorCount( uint32_t scissorCount_ )
7270 {
7271 scissorCount = scissorCount_;
7272 return *this;
7273 }
7274
7275 PipelineViewportStateCreateInfo& setPScissors( const Rect2D* pScissors_ )
7276 {
7277 pScissors = pScissors_;
7278 return *this;
7279 }
7280
7281 operator const VkPipelineViewportStateCreateInfo&() const
7282 {
7283 return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>(this);
7284 }
7285
7286 bool operator==( PipelineViewportStateCreateInfo const& rhs ) const
7287 {
7288 return ( sType == rhs.sType )
7289 && ( pNext == rhs.pNext )
7290 && ( flags == rhs.flags )
7291 && ( viewportCount == rhs.viewportCount )
7292 && ( pViewports == rhs.pViewports )
7293 && ( scissorCount == rhs.scissorCount )
7294 && ( pScissors == rhs.pScissors );
7295 }
7296
7297 bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const
7298 {
7299 return !operator==( rhs );
7300 }
7301
7302 private:
7303 StructureType sType;
7304
7305 public:
7306 const void* pNext;
7307 PipelineViewportStateCreateFlags flags;
7308 uint32_t viewportCount;
7309 const Viewport* pViewports;
7310 uint32_t scissorCount;
7311 const Rect2D* pScissors;
7312 };
7313 static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
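  // Usage sketch (illustrative comment only): pViewports and pScissors only store pointers, so the
  // caller-owned arrays must stay alive until the pipeline is created.
  //
  //   vk::Viewport viewport( 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f );
  //   vk::Rect2D   scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) );
  //   vk::PipelineViewportStateCreateInfo viewportState(
  //       vk::PipelineViewportStateCreateFlags(), 1, &viewport, 1, &scissor );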
7314
7315 struct PipelineRasterizationStateCreateInfo
7316 {
7317 PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = PipelineRasterizationStateCreateFlags(), Bool32 depthClampEnable_ = 0, Bool32 rasterizerDiscardEnable_ = 0, PolygonMode polygonMode_ = PolygonMode::eFill, CullModeFlags cullMode_ = CullModeFlags(), FrontFace frontFace_ = FrontFace::eCounterClockwise, Bool32 depthBiasEnable_ = 0, float depthBiasConstantFactor_ = 0, float depthBiasClamp_ = 0, float depthBiasSlopeFactor_ = 0, float lineWidth_ = 0 )
7318 : sType( StructureType::ePipelineRasterizationStateCreateInfo )
7319 , pNext( nullptr )
7320 , flags( flags_ )
7321 , depthClampEnable( depthClampEnable_ )
7322 , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
7323 , polygonMode( polygonMode_ )
7324 , cullMode( cullMode_ )
7325 , frontFace( frontFace_ )
7326 , depthBiasEnable( depthBiasEnable_ )
7327 , depthBiasConstantFactor( depthBiasConstantFactor_ )
7328 , depthBiasClamp( depthBiasClamp_ )
7329 , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
7330 , lineWidth( lineWidth_ )
7331 {
7332 }
7333
7334 PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs )
7335 {
7336 memcpy( this, &rhs, sizeof(PipelineRasterizationStateCreateInfo) );
7337 }
7338
7339 PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs )
7340 {
7341 memcpy( this, &rhs, sizeof(PipelineRasterizationStateCreateInfo) );
7342 return *this;
7343 }
7344
7345    PipelineRasterizationStateCreateInfo& setPNext( const void* pNext_ )
7346 {
7347 pNext = pNext_;
7348 return *this;
7349 }
7350
7351 PipelineRasterizationStateCreateInfo& setFlags( PipelineRasterizationStateCreateFlags flags_ )
7352 {
7353 flags = flags_;
7354 return *this;
7355 }
7356
7357 PipelineRasterizationStateCreateInfo& setDepthClampEnable( Bool32 depthClampEnable_ )
7358 {
7359 depthClampEnable = depthClampEnable_;
7360 return *this;
7361 }
7362
7363 PipelineRasterizationStateCreateInfo& setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ )
7364 {
7365 rasterizerDiscardEnable = rasterizerDiscardEnable_;
7366 return *this;
7367 }
7368
7369 PipelineRasterizationStateCreateInfo& setPolygonMode( PolygonMode polygonMode_ )
7370 {
7371 polygonMode = polygonMode_;
7372 return *this;
7373 }
7374
7375 PipelineRasterizationStateCreateInfo& setCullMode( CullModeFlags cullMode_ )
7376 {
7377 cullMode = cullMode_;
7378 return *this;
7379 }
7380
7381 PipelineRasterizationStateCreateInfo& setFrontFace( FrontFace frontFace_ )
7382 {
7383 frontFace = frontFace_;
7384 return *this;
7385 }
7386
7387 PipelineRasterizationStateCreateInfo& setDepthBiasEnable( Bool32 depthBiasEnable_ )
7388 {
7389 depthBiasEnable = depthBiasEnable_;
7390 return *this;
7391 }
7392
7393 PipelineRasterizationStateCreateInfo& setDepthBiasConstantFactor( float depthBiasConstantFactor_ )
7394 {
7395 depthBiasConstantFactor = depthBiasConstantFactor_;
7396 return *this;
7397 }
7398
7399 PipelineRasterizationStateCreateInfo& setDepthBiasClamp( float depthBiasClamp_ )
7400 {
7401 depthBiasClamp = depthBiasClamp_;
7402 return *this;
7403 }
7404
7405 PipelineRasterizationStateCreateInfo& setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ )
7406 {
7407 depthBiasSlopeFactor = depthBiasSlopeFactor_;
7408 return *this;
7409 }
7410
7411 PipelineRasterizationStateCreateInfo& setLineWidth( float lineWidth_ )
7412 {
7413 lineWidth = lineWidth_;
7414 return *this;
7415 }
7416
7417 operator const VkPipelineRasterizationStateCreateInfo&() const
7418 {
7419 return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>(this);
7420 }
7421
7422 bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const
7423 {
7424 return ( sType == rhs.sType )
7425 && ( pNext == rhs.pNext )
7426 && ( flags == rhs.flags )
7427 && ( depthClampEnable == rhs.depthClampEnable )
7428 && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
7429 && ( polygonMode == rhs.polygonMode )
7430 && ( cullMode == rhs.cullMode )
7431 && ( frontFace == rhs.frontFace )
7432 && ( depthBiasEnable == rhs.depthBiasEnable )
7433 && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
7434 && ( depthBiasClamp == rhs.depthBiasClamp )
7435 && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
7436 && ( lineWidth == rhs.lineWidth );
7437 }
7438
7439 bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const
7440 {
7441 return !operator==( rhs );
7442 }
7443
7444 private:
7445 StructureType sType;
7446
7447 public:
7448 const void* pNext;
7449 PipelineRasterizationStateCreateFlags flags;
7450 Bool32 depthClampEnable;
7451 Bool32 rasterizerDiscardEnable;
7452 PolygonMode polygonMode;
7453 CullModeFlags cullMode;
7454 FrontFace frontFace;
7455 Bool32 depthBiasEnable;
7456 float depthBiasConstantFactor;
7457 float depthBiasClamp;
7458 float depthBiasSlopeFactor;
7459 float lineWidth;
7460 };
7461 static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
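  // Usage sketch (illustrative comment only): note that all numeric members, including lineWidth,
  // default to 0 here, so a typical pipeline still sets lineWidth to 1.0f explicitly.
  //
  //   vk::PipelineRasterizationStateCreateInfo raster = vk::PipelineRasterizationStateCreateInfo()
  //       .setPolygonMode( vk::PolygonMode::eFill )
  //       .setCullMode( vk::CullModeFlagBits::eBack )
  //       .setFrontFace( vk::FrontFace::eCounterClockwise )
  //       .setLineWidth( 1.0f );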
7462
7463 struct PipelineDepthStencilStateCreateInfo
7464 {
7465 PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = PipelineDepthStencilStateCreateFlags(), Bool32 depthTestEnable_ = 0, Bool32 depthWriteEnable_ = 0, CompareOp depthCompareOp_ = CompareOp::eNever, Bool32 depthBoundsTestEnable_ = 0, Bool32 stencilTestEnable_ = 0, StencilOpState front_ = StencilOpState(), StencilOpState back_ = StencilOpState(), float minDepthBounds_ = 0, float maxDepthBounds_ = 0 )
7466 : sType( StructureType::ePipelineDepthStencilStateCreateInfo )
7467 , pNext( nullptr )
7468 , flags( flags_ )
7469 , depthTestEnable( depthTestEnable_ )
7470 , depthWriteEnable( depthWriteEnable_ )
7471 , depthCompareOp( depthCompareOp_ )
7472 , depthBoundsTestEnable( depthBoundsTestEnable_ )
7473 , stencilTestEnable( stencilTestEnable_ )
7474 , front( front_ )
7475 , back( back_ )
7476 , minDepthBounds( minDepthBounds_ )
7477 , maxDepthBounds( maxDepthBounds_ )
7478 {
7479 }
7480
7481 PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs )
7482 {
7483 memcpy( this, &rhs, sizeof(PipelineDepthStencilStateCreateInfo) );
7484 }
7485
7486 PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs )
7487 {
7488 memcpy( this, &rhs, sizeof(PipelineDepthStencilStateCreateInfo) );
7489 return *this;
7490 }
7491
7492    PipelineDepthStencilStateCreateInfo& setPNext( const void* pNext_ )
7493 {
7494 pNext = pNext_;
7495 return *this;
7496 }
7497
7498 PipelineDepthStencilStateCreateInfo& setFlags( PipelineDepthStencilStateCreateFlags flags_ )
7499 {
7500 flags = flags_;
7501 return *this;
7502 }
7503
7504 PipelineDepthStencilStateCreateInfo& setDepthTestEnable( Bool32 depthTestEnable_ )
7505 {
7506 depthTestEnable = depthTestEnable_;
7507 return *this;
7508 }
7509
7510 PipelineDepthStencilStateCreateInfo& setDepthWriteEnable( Bool32 depthWriteEnable_ )
7511 {
7512 depthWriteEnable = depthWriteEnable_;
7513 return *this;
7514 }
7515
7516 PipelineDepthStencilStateCreateInfo& setDepthCompareOp( CompareOp depthCompareOp_ )
7517 {
7518 depthCompareOp = depthCompareOp_;
7519 return *this;
7520 }
7521
7522 PipelineDepthStencilStateCreateInfo& setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ )
7523 {
7524 depthBoundsTestEnable = depthBoundsTestEnable_;
7525 return *this;
7526 }
7527
7528 PipelineDepthStencilStateCreateInfo& setStencilTestEnable( Bool32 stencilTestEnable_ )
7529 {
7530 stencilTestEnable = stencilTestEnable_;
7531 return *this;
7532 }
7533
7534 PipelineDepthStencilStateCreateInfo& setFront( StencilOpState front_ )
7535 {
7536 front = front_;
7537 return *this;
7538 }
7539
7540 PipelineDepthStencilStateCreateInfo& setBack( StencilOpState back_ )
7541 {
7542 back = back_;
7543 return *this;
7544 }
7545
7546 PipelineDepthStencilStateCreateInfo& setMinDepthBounds( float minDepthBounds_ )
7547 {
7548 minDepthBounds = minDepthBounds_;
7549 return *this;
7550 }
7551
7552 PipelineDepthStencilStateCreateInfo& setMaxDepthBounds( float maxDepthBounds_ )
7553 {
7554 maxDepthBounds = maxDepthBounds_;
7555 return *this;
7556 }
7557
7558 operator const VkPipelineDepthStencilStateCreateInfo&() const
7559 {
7560 return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>(this);
7561 }
7562
7563 bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const
7564 {
7565 return ( sType == rhs.sType )
7566 && ( pNext == rhs.pNext )
7567 && ( flags == rhs.flags )
7568 && ( depthTestEnable == rhs.depthTestEnable )
7569 && ( depthWriteEnable == rhs.depthWriteEnable )
7570 && ( depthCompareOp == rhs.depthCompareOp )
7571 && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
7572 && ( stencilTestEnable == rhs.stencilTestEnable )
7573 && ( front == rhs.front )
7574 && ( back == rhs.back )
7575 && ( minDepthBounds == rhs.minDepthBounds )
7576 && ( maxDepthBounds == rhs.maxDepthBounds );
7577 }
7578
7579 bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const
7580 {
7581 return !operator==( rhs );
7582 }
7583
7584 private:
7585 StructureType sType;
7586
7587 public:
7588 const void* pNext;
7589 PipelineDepthStencilStateCreateFlags flags;
7590 Bool32 depthTestEnable;
7591 Bool32 depthWriteEnable;
7592 CompareOp depthCompareOp;
7593 Bool32 depthBoundsTestEnable;
7594 Bool32 stencilTestEnable;
7595 StencilOpState front;
7596 StencilOpState back;
7597 float minDepthBounds;
7598 float maxDepthBounds;
7599 };
7600 static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
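  // Usage sketch (illustrative comment only): enabling a standard less-than depth test.
  //
  //   vk::PipelineDepthStencilStateCreateInfo depthStencil = vk::PipelineDepthStencilStateCreateInfo()
  //       .setDepthTestEnable( VK_TRUE )
  //       .setDepthWriteEnable( VK_TRUE )
  //       .setDepthCompareOp( vk::CompareOp::eLess );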
7601
7602 struct PipelineCacheCreateInfo
7603 {
7604 PipelineCacheCreateInfo( PipelineCacheCreateFlags flags_ = PipelineCacheCreateFlags(), size_t initialDataSize_ = 0, const void* pInitialData_ = nullptr )
7605 : sType( StructureType::ePipelineCacheCreateInfo )
7606 , pNext( nullptr )
7607 , flags( flags_ )
7608 , initialDataSize( initialDataSize_ )
7609 , pInitialData( pInitialData_ )
7610 {
7611 }
7612
7613 PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs )
7614 {
7615 memcpy( this, &rhs, sizeof(PipelineCacheCreateInfo) );
7616 }
7617
7618 PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs )
7619 {
7620 memcpy( this, &rhs, sizeof(PipelineCacheCreateInfo) );
7621 return *this;
7622 }
7623
7624    PipelineCacheCreateInfo& setPNext( const void* pNext_ )
7625 {
7626 pNext = pNext_;
7627 return *this;
7628 }
7629
7630 PipelineCacheCreateInfo& setFlags( PipelineCacheCreateFlags flags_ )
7631 {
7632 flags = flags_;
7633 return *this;
7634 }
7635
7636 PipelineCacheCreateInfo& setInitialDataSize( size_t initialDataSize_ )
7637 {
7638 initialDataSize = initialDataSize_;
7639 return *this;
7640 }
7641
7642 PipelineCacheCreateInfo& setPInitialData( const void* pInitialData_ )
7643 {
7644 pInitialData = pInitialData_;
7645 return *this;
7646 }
7647
7648 operator const VkPipelineCacheCreateInfo&() const
7649 {
7650 return *reinterpret_cast<const VkPipelineCacheCreateInfo*>(this);
7651 }
7652
7653 bool operator==( PipelineCacheCreateInfo const& rhs ) const
7654 {
7655 return ( sType == rhs.sType )
7656 && ( pNext == rhs.pNext )
7657 && ( flags == rhs.flags )
7658 && ( initialDataSize == rhs.initialDataSize )
7659 && ( pInitialData == rhs.pInitialData );
7660 }
7661
7662 bool operator!=( PipelineCacheCreateInfo const& rhs ) const
7663 {
7664 return !operator==( rhs );
7665 }
7666
7667 private:
7668 StructureType sType;
7669
7670 public:
7671 const void* pNext;
7672 PipelineCacheCreateFlags flags;
7673 size_t initialDataSize;
7674 const void* pInitialData;
7675 };
7676 static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
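  // Usage sketch (illustrative comment only): seeding a cache with previously saved data.  The
  // names "device", "cacheBlob" and loadCacheFromDisk() are assumptions for the example; in
  // enhanced mode createPipelineCache throws on failure.
  //
  //   std::vector<uint8_t> cacheBlob = loadCacheFromDisk();  // hypothetical helper
  //   vk::PipelineCacheCreateInfo cacheInfo( vk::PipelineCacheCreateFlags(),
  //                                          cacheBlob.size(), cacheBlob.data() );
  //   vk::PipelineCache cache = device.createPipelineCache( cacheInfo );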
7677
7678 struct SamplerCreateInfo
7679 {
7680 SamplerCreateInfo( SamplerCreateFlags flags_ = SamplerCreateFlags(), Filter magFilter_ = Filter::eNearest, Filter minFilter_ = Filter::eNearest, SamplerMipmapMode mipmapMode_ = SamplerMipmapMode::eNearest, SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, float mipLodBias_ = 0, Bool32 anisotropyEnable_ = 0, float maxAnisotropy_ = 0, Bool32 compareEnable_ = 0, CompareOp compareOp_ = CompareOp::eNever, float minLod_ = 0, float maxLod_ = 0, BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, Bool32 unnormalizedCoordinates_ = 0 )
7681 : sType( StructureType::eSamplerCreateInfo )
7682 , pNext( nullptr )
7683 , flags( flags_ )
7684 , magFilter( magFilter_ )
7685 , minFilter( minFilter_ )
7686 , mipmapMode( mipmapMode_ )
7687 , addressModeU( addressModeU_ )
7688 , addressModeV( addressModeV_ )
7689 , addressModeW( addressModeW_ )
7690 , mipLodBias( mipLodBias_ )
7691 , anisotropyEnable( anisotropyEnable_ )
7692 , maxAnisotropy( maxAnisotropy_ )
7693 , compareEnable( compareEnable_ )
7694 , compareOp( compareOp_ )
7695 , minLod( minLod_ )
7696 , maxLod( maxLod_ )
7697 , borderColor( borderColor_ )
7698 , unnormalizedCoordinates( unnormalizedCoordinates_ )
7699 {
7700 }
7701
7702 SamplerCreateInfo( VkSamplerCreateInfo const & rhs )
7703 {
7704 memcpy( this, &rhs, sizeof(SamplerCreateInfo) );
7705 }
7706
7707 SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs )
7708 {
7709 memcpy( this, &rhs, sizeof(SamplerCreateInfo) );
7710 return *this;
7711 }
7712
7713    SamplerCreateInfo& setPNext( const void* pNext_ )
7714 {
7715 pNext = pNext_;
7716 return *this;
7717 }
7718
7719 SamplerCreateInfo& setFlags( SamplerCreateFlags flags_ )
7720 {
7721 flags = flags_;
7722 return *this;
7723 }
7724
7725 SamplerCreateInfo& setMagFilter( Filter magFilter_ )
7726 {
7727 magFilter = magFilter_;
7728 return *this;
7729 }
7730
7731 SamplerCreateInfo& setMinFilter( Filter minFilter_ )
7732 {
7733 minFilter = minFilter_;
7734 return *this;
7735 }
7736
7737 SamplerCreateInfo& setMipmapMode( SamplerMipmapMode mipmapMode_ )
7738 {
7739 mipmapMode = mipmapMode_;
7740 return *this;
7741 }
7742
7743 SamplerCreateInfo& setAddressModeU( SamplerAddressMode addressModeU_ )
7744 {
7745 addressModeU = addressModeU_;
7746 return *this;
7747 }
7748
7749 SamplerCreateInfo& setAddressModeV( SamplerAddressMode addressModeV_ )
7750 {
7751 addressModeV = addressModeV_;
7752 return *this;
7753 }
7754
7755 SamplerCreateInfo& setAddressModeW( SamplerAddressMode addressModeW_ )
7756 {
7757 addressModeW = addressModeW_;
7758 return *this;
7759 }
7760
7761 SamplerCreateInfo& setMipLodBias( float mipLodBias_ )
7762 {
7763 mipLodBias = mipLodBias_;
7764 return *this;
7765 }
7766
7767 SamplerCreateInfo& setAnisotropyEnable( Bool32 anisotropyEnable_ )
7768 {
7769 anisotropyEnable = anisotropyEnable_;
7770 return *this;
7771 }
7772
7773 SamplerCreateInfo& setMaxAnisotropy( float maxAnisotropy_ )
7774 {
7775 maxAnisotropy = maxAnisotropy_;
7776 return *this;
7777 }
7778
7779 SamplerCreateInfo& setCompareEnable( Bool32 compareEnable_ )
7780 {
7781 compareEnable = compareEnable_;
7782 return *this;
7783 }
7784
7785 SamplerCreateInfo& setCompareOp( CompareOp compareOp_ )
7786 {
7787 compareOp = compareOp_;
7788 return *this;
7789 }
7790
7791 SamplerCreateInfo& setMinLod( float minLod_ )
7792 {
7793 minLod = minLod_;
7794 return *this;
7795 }
7796
7797 SamplerCreateInfo& setMaxLod( float maxLod_ )
7798 {
7799 maxLod = maxLod_;
7800 return *this;
7801 }
7802
7803 SamplerCreateInfo& setBorderColor( BorderColor borderColor_ )
7804 {
7805 borderColor = borderColor_;
7806 return *this;
7807 }
7808
7809 SamplerCreateInfo& setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ )
7810 {
7811 unnormalizedCoordinates = unnormalizedCoordinates_;
7812 return *this;
7813 }
7814
7815 operator const VkSamplerCreateInfo&() const
7816 {
7817 return *reinterpret_cast<const VkSamplerCreateInfo*>(this);
7818 }
7819
7820 bool operator==( SamplerCreateInfo const& rhs ) const
7821 {
7822 return ( sType == rhs.sType )
7823 && ( pNext == rhs.pNext )
7824 && ( flags == rhs.flags )
7825 && ( magFilter == rhs.magFilter )
7826 && ( minFilter == rhs.minFilter )
7827 && ( mipmapMode == rhs.mipmapMode )
7828 && ( addressModeU == rhs.addressModeU )
7829 && ( addressModeV == rhs.addressModeV )
7830 && ( addressModeW == rhs.addressModeW )
7831 && ( mipLodBias == rhs.mipLodBias )
7832 && ( anisotropyEnable == rhs.anisotropyEnable )
7833 && ( maxAnisotropy == rhs.maxAnisotropy )
7834 && ( compareEnable == rhs.compareEnable )
7835 && ( compareOp == rhs.compareOp )
7836 && ( minLod == rhs.minLod )
7837 && ( maxLod == rhs.maxLod )
7838 && ( borderColor == rhs.borderColor )
7839 && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
7840 }
7841
7842 bool operator!=( SamplerCreateInfo const& rhs ) const
7843 {
7844 return !operator==( rhs );
7845 }
7846
7847 private:
7848 StructureType sType;
7849
7850 public:
7851 const void* pNext;
7852 SamplerCreateFlags flags;
7853 Filter magFilter;
7854 Filter minFilter;
7855 SamplerMipmapMode mipmapMode;
7856 SamplerAddressMode addressModeU;
7857 SamplerAddressMode addressModeV;
7858 SamplerAddressMode addressModeW;
7859 float mipLodBias;
7860 Bool32 anisotropyEnable;
7861 float maxAnisotropy;
7862 Bool32 compareEnable;
7863 CompareOp compareOp;
7864 float minLod;
7865 float maxLod;
7866 BorderColor borderColor;
7867 Bool32 unnormalizedCoordinates;
7868 };
7869 static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
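  // Usage sketch (illustrative comment only): a trilinear sampler with repeat addressing;
  // "device" is assumed to be a valid vk::Device.
  //
  //   vk::SamplerCreateInfo samplerInfo = vk::SamplerCreateInfo()
  //       .setMagFilter( vk::Filter::eLinear )
  //       .setMinFilter( vk::Filter::eLinear )
  //       .setMipmapMode( vk::SamplerMipmapMode::eLinear )
  //       .setMaxLod( 8.0f );
  //   vk::Sampler sampler = device.createSampler( samplerInfo );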
7870
7871 struct CommandBufferAllocateInfo
7872 {
7873 CommandBufferAllocateInfo( CommandPool commandPool_ = CommandPool(), CommandBufferLevel level_ = CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = 0 )
7874 : sType( StructureType::eCommandBufferAllocateInfo )
7875 , pNext( nullptr )
7876 , commandPool( commandPool_ )
7877 , level( level_ )
7878 , commandBufferCount( commandBufferCount_ )
7879 {
7880 }
7881
7882 CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs )
7883 {
7884 memcpy( this, &rhs, sizeof(CommandBufferAllocateInfo) );
7885 }
7886
7887 CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs )
7888 {
7889 memcpy( this, &rhs, sizeof(CommandBufferAllocateInfo) );
7890 return *this;
7891 }
7892
7893    CommandBufferAllocateInfo& setPNext( const void* pNext_ )
7894 {
7895 pNext = pNext_;
7896 return *this;
7897 }
7898
7899 CommandBufferAllocateInfo& setCommandPool( CommandPool commandPool_ )
7900 {
7901 commandPool = commandPool_;
7902 return *this;
7903 }
7904
7905 CommandBufferAllocateInfo& setLevel( CommandBufferLevel level_ )
7906 {
7907 level = level_;
7908 return *this;
7909 }
7910
7911 CommandBufferAllocateInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
7912 {
7913 commandBufferCount = commandBufferCount_;
7914 return *this;
7915 }
7916
7917 operator const VkCommandBufferAllocateInfo&() const
7918 {
7919 return *reinterpret_cast<const VkCommandBufferAllocateInfo*>(this);
7920 }
7921
7922 bool operator==( CommandBufferAllocateInfo const& rhs ) const
7923 {
7924 return ( sType == rhs.sType )
7925 && ( pNext == rhs.pNext )
7926 && ( commandPool == rhs.commandPool )
7927 && ( level == rhs.level )
7928 && ( commandBufferCount == rhs.commandBufferCount );
7929 }
7930
7931 bool operator!=( CommandBufferAllocateInfo const& rhs ) const
7932 {
7933 return !operator==( rhs );
7934 }
7935
7936 private:
7937 StructureType sType;
7938
7939 public:
7940 const void* pNext;
7941 CommandPool commandPool;
7942 CommandBufferLevel level;
7943 uint32_t commandBufferCount;
7944 };
7945 static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
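  // Usage sketch (illustrative comment only): allocating two primary command buffers from an
  // existing pool; "device" and "commandPool" are assumed to be valid handles.
  //
  //   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
  //   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocInfo );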
7946
7947 struct RenderPassBeginInfo
7948 {
7949 RenderPassBeginInfo( RenderPass renderPass_ = RenderPass(), Framebuffer framebuffer_ = Framebuffer(), Rect2D renderArea_ = Rect2D(), uint32_t clearValueCount_ = 0, const ClearValue* pClearValues_ = nullptr )
7950 : sType( StructureType::eRenderPassBeginInfo )
7951 , pNext( nullptr )
7952 , renderPass( renderPass_ )
7953 , framebuffer( framebuffer_ )
7954 , renderArea( renderArea_ )
7955 , clearValueCount( clearValueCount_ )
7956 , pClearValues( pClearValues_ )
7957 {
7958 }
7959
7960 RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs )
7961 {
7962 memcpy( this, &rhs, sizeof(RenderPassBeginInfo) );
7963 }
7964
7965 RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs )
7966 {
7967 memcpy( this, &rhs, sizeof(RenderPassBeginInfo) );
7968 return *this;
7969 }
7970
7971    RenderPassBeginInfo& setPNext( const void* pNext_ )
7972 {
7973 pNext = pNext_;
7974 return *this;
7975 }
7976
7977 RenderPassBeginInfo& setRenderPass( RenderPass renderPass_ )
7978 {
7979 renderPass = renderPass_;
7980 return *this;
7981 }
7982
7983 RenderPassBeginInfo& setFramebuffer( Framebuffer framebuffer_ )
7984 {
7985 framebuffer = framebuffer_;
7986 return *this;
7987 }
7988
7989 RenderPassBeginInfo& setRenderArea( Rect2D renderArea_ )
7990 {
7991 renderArea = renderArea_;
7992 return *this;
7993 }
7994
7995 RenderPassBeginInfo& setClearValueCount( uint32_t clearValueCount_ )
7996 {
7997 clearValueCount = clearValueCount_;
7998 return *this;
7999 }
8000
8001 RenderPassBeginInfo& setPClearValues( const ClearValue* pClearValues_ )
8002 {
8003 pClearValues = pClearValues_;
8004 return *this;
8005 }
8006
8007 operator const VkRenderPassBeginInfo&() const
8008 {
8009 return *reinterpret_cast<const VkRenderPassBeginInfo*>(this);
8010 }
8011
8012 bool operator==( RenderPassBeginInfo const& rhs ) const
8013 {
8014 return ( sType == rhs.sType )
8015 && ( pNext == rhs.pNext )
8016 && ( renderPass == rhs.renderPass )
8017 && ( framebuffer == rhs.framebuffer )
8018 && ( renderArea == rhs.renderArea )
8019 && ( clearValueCount == rhs.clearValueCount )
8020 && ( pClearValues == rhs.pClearValues );
8021 }
8022
8023 bool operator!=( RenderPassBeginInfo const& rhs ) const
8024 {
8025 return !operator==( rhs );
8026 }
8027
8028 private:
8029 StructureType sType;
8030
8031 public:
8032 const void* pNext;
8033 RenderPass renderPass;
8034 Framebuffer framebuffer;
8035 Rect2D renderArea;
8036 uint32_t clearValueCount;
8037 const ClearValue* pClearValues;
8038 };
8039 static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
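  // Usage sketch (illustrative comment only): beginning a render pass with a single color clear
  // value; "cmd", "renderPass" and "framebuffer" are assumed to exist, and the clear value must
  // outlive the call because only its address is stored.
  //
  //   vk::ClearValue clearColor( vk::ClearColorValue( std::array<float, 4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  //   vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer,
  //       vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) ), 1, &clearColor );
  //   cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );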
8040
8041 struct EventCreateInfo
8042 {
8043 EventCreateInfo( EventCreateFlags flags_ = EventCreateFlags() )
8044 : sType( StructureType::eEventCreateInfo )
8045 , pNext( nullptr )
8046 , flags( flags_ )
8047 {
8048 }
8049
8050 EventCreateInfo( VkEventCreateInfo const & rhs )
8051 {
8052 memcpy( this, &rhs, sizeof(EventCreateInfo) );
8053 }
8054
8055 EventCreateInfo& operator=( VkEventCreateInfo const & rhs )
8056 {
8057 memcpy( this, &rhs, sizeof(EventCreateInfo) );
8058 return *this;
8059 }
8060
8061    EventCreateInfo& setPNext( const void* pNext_ )
8062 {
8063 pNext = pNext_;
8064 return *this;
8065 }
8066
8067 EventCreateInfo& setFlags( EventCreateFlags flags_ )
8068 {
8069 flags = flags_;
8070 return *this;
8071 }
8072
8073 operator const VkEventCreateInfo&() const
8074 {
8075 return *reinterpret_cast<const VkEventCreateInfo*>(this);
8076 }
8077
8078 bool operator==( EventCreateInfo const& rhs ) const
8079 {
8080 return ( sType == rhs.sType )
8081 && ( pNext == rhs.pNext )
8082 && ( flags == rhs.flags );
8083 }
8084
8085 bool operator!=( EventCreateInfo const& rhs ) const
8086 {
8087 return !operator==( rhs );
8088 }
8089
8090 private:
8091 StructureType sType;
8092
8093 public:
8094 const void* pNext;
8095 EventCreateFlags flags;
8096 };
8097 static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
8098
8099 struct SemaphoreCreateInfo
8100 {
8101 SemaphoreCreateInfo( SemaphoreCreateFlags flags_ = SemaphoreCreateFlags() )
8102 : sType( StructureType::eSemaphoreCreateInfo )
8103 , pNext( nullptr )
8104 , flags( flags_ )
8105 {
8106 }
8107
8108 SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs )
8109 {
8110 memcpy( this, &rhs, sizeof(SemaphoreCreateInfo) );
8111 }
8112
8113 SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs )
8114 {
8115 memcpy( this, &rhs, sizeof(SemaphoreCreateInfo) );
8116 return *this;
8117 }
8118
8119    SemaphoreCreateInfo& setPNext( const void* pNext_ )
8120 {
8121 pNext = pNext_;
8122 return *this;
8123 }
8124
8125 SemaphoreCreateInfo& setFlags( SemaphoreCreateFlags flags_ )
8126 {
8127 flags = flags_;
8128 return *this;
8129 }
8130
8131 operator const VkSemaphoreCreateInfo&() const
8132 {
8133 return *reinterpret_cast<const VkSemaphoreCreateInfo*>(this);
8134 }
8135
8136 bool operator==( SemaphoreCreateInfo const& rhs ) const
8137 {
8138 return ( sType == rhs.sType )
8139 && ( pNext == rhs.pNext )
8140 && ( flags == rhs.flags );
8141 }
8142
8143 bool operator!=( SemaphoreCreateInfo const& rhs ) const
8144 {
8145 return !operator==( rhs );
8146 }
8147
8148 private:
8149 StructureType sType;
8150
8151 public:
8152 const void* pNext;
8153 SemaphoreCreateFlags flags;
8154 };
8155 static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
8156
8157 struct FramebufferCreateInfo
8158 {
8159 FramebufferCreateInfo( FramebufferCreateFlags flags_ = FramebufferCreateFlags(), RenderPass renderPass_ = RenderPass(), uint32_t attachmentCount_ = 0, const ImageView* pAttachments_ = nullptr, uint32_t width_ = 0, uint32_t height_ = 0, uint32_t layers_ = 0 )
8160 : sType( StructureType::eFramebufferCreateInfo )
8161 , pNext( nullptr )
8162 , flags( flags_ )
8163 , renderPass( renderPass_ )
8164 , attachmentCount( attachmentCount_ )
8165 , pAttachments( pAttachments_ )
8166 , width( width_ )
8167 , height( height_ )
8168 , layers( layers_ )
8169 {
8170 }
8171
8172 FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs )
8173 {
8174 memcpy( this, &rhs, sizeof(FramebufferCreateInfo) );
8175 }
8176
8177 FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs )
8178 {
8179 memcpy( this, &rhs, sizeof(FramebufferCreateInfo) );
8180 return *this;
8181 }
8182
8183    FramebufferCreateInfo& setPNext( const void* pNext_ )
8184 {
8185 pNext = pNext_;
8186 return *this;
8187 }
8188
8189 FramebufferCreateInfo& setFlags( FramebufferCreateFlags flags_ )
8190 {
8191 flags = flags_;
8192 return *this;
8193 }
8194
8195 FramebufferCreateInfo& setRenderPass( RenderPass renderPass_ )
8196 {
8197 renderPass = renderPass_;
8198 return *this;
8199 }
8200
8201 FramebufferCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
8202 {
8203 attachmentCount = attachmentCount_;
8204 return *this;
8205 }
8206
8207 FramebufferCreateInfo& setPAttachments( const ImageView* pAttachments_ )
8208 {
8209 pAttachments = pAttachments_;
8210 return *this;
8211 }
8212
8213 FramebufferCreateInfo& setWidth( uint32_t width_ )
8214 {
8215 width = width_;
8216 return *this;
8217 }
8218
8219 FramebufferCreateInfo& setHeight( uint32_t height_ )
8220 {
8221 height = height_;
8222 return *this;
8223 }
8224
8225 FramebufferCreateInfo& setLayers( uint32_t layers_ )
8226 {
8227 layers = layers_;
8228 return *this;
8229 }
8230
8231 operator const VkFramebufferCreateInfo&() const
8232 {
8233 return *reinterpret_cast<const VkFramebufferCreateInfo*>(this);
8234 }
8235
8236 bool operator==( FramebufferCreateInfo const& rhs ) const
8237 {
8238 return ( sType == rhs.sType )
8239 && ( pNext == rhs.pNext )
8240 && ( flags == rhs.flags )
8241 && ( renderPass == rhs.renderPass )
8242 && ( attachmentCount == rhs.attachmentCount )
8243 && ( pAttachments == rhs.pAttachments )
8244 && ( width == rhs.width )
8245 && ( height == rhs.height )
8246 && ( layers == rhs.layers );
8247 }
8248
8249 bool operator!=( FramebufferCreateInfo const& rhs ) const
8250 {
8251 return !operator==( rhs );
8252 }
8253
8254 private:
8255 StructureType sType;
8256
8257 public:
8258 const void* pNext;
8259 FramebufferCreateFlags flags;
8260 RenderPass renderPass;
8261 uint32_t attachmentCount;
8262 const ImageView* pAttachments;
8263 uint32_t width;
8264 uint32_t height;
8265 uint32_t layers;
8266 };
8267 static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
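  // Usage sketch (illustrative comment only): a single-attachment framebuffer; "device",
  // "renderPass" and "colorView" are assumed to be valid handles created elsewhere.
  //
  //   vk::FramebufferCreateInfo fbInfo( vk::FramebufferCreateFlags(), renderPass,
  //                                     1, &colorView, 1280, 720, 1 );
  //   vk::Framebuffer framebuffer = device.createFramebuffer( fbInfo );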
8268
8269 struct DisplayModeCreateInfoKHR
8270 {
8271 DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = DisplayModeCreateFlagsKHR(), DisplayModeParametersKHR parameters_ = DisplayModeParametersKHR() )
8272 : sType( StructureType::eDisplayModeCreateInfoKHR )
8273 , pNext( nullptr )
8274 , flags( flags_ )
8275 , parameters( parameters_ )
8276 {
8277 }
8278
8279 DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs )
8280 {
8281 memcpy( this, &rhs, sizeof(DisplayModeCreateInfoKHR) );
8282 }
8283
8284 DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs )
8285 {
8286 memcpy( this, &rhs, sizeof(DisplayModeCreateInfoKHR) );
8287 return *this;
8288 }
8289
8290    DisplayModeCreateInfoKHR& setPNext( const void* pNext_ )
8291 {
8292 pNext = pNext_;
8293 return *this;
8294 }
8295
8296 DisplayModeCreateInfoKHR& setFlags( DisplayModeCreateFlagsKHR flags_ )
8297 {
8298 flags = flags_;
8299 return *this;
8300 }
8301
8302 DisplayModeCreateInfoKHR& setParameters( DisplayModeParametersKHR parameters_ )
8303 {
8304 parameters = parameters_;
8305 return *this;
8306 }
8307
8308 operator const VkDisplayModeCreateInfoKHR&() const
8309 {
8310 return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>(this);
8311 }
8312
8313 bool operator==( DisplayModeCreateInfoKHR const& rhs ) const
8314 {
8315 return ( sType == rhs.sType )
8316 && ( pNext == rhs.pNext )
8317 && ( flags == rhs.flags )
8318 && ( parameters == rhs.parameters );
8319 }
8320
8321 bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const
8322 {
8323 return !operator==( rhs );
8324 }
8325
8326 private:
8327 StructureType sType;
8328
8329 public:
8330 const void* pNext;
8331 DisplayModeCreateFlagsKHR flags;
8332 DisplayModeParametersKHR parameters;
8333 };
8334 static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
8335
8336 struct DisplayPresentInfoKHR
8337 {
8338 DisplayPresentInfoKHR( Rect2D srcRect_ = Rect2D(), Rect2D dstRect_ = Rect2D(), Bool32 persistent_ = 0 )
8339 : sType( StructureType::eDisplayPresentInfoKHR )
8340 , pNext( nullptr )
8341 , srcRect( srcRect_ )
8342 , dstRect( dstRect_ )
8343 , persistent( persistent_ )
8344 {
8345 }
8346
8347 DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs )
8348 {
8349 memcpy( this, &rhs, sizeof(DisplayPresentInfoKHR) );
8350 }
8351
8352 DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs )
8353 {
8354 memcpy( this, &rhs, sizeof(DisplayPresentInfoKHR) );
8355 return *this;
8356 }
8357
8358    DisplayPresentInfoKHR& setPNext( const void* pNext_ )
8359 {
8360 pNext = pNext_;
8361 return *this;
8362 }
8363
8364 DisplayPresentInfoKHR& setSrcRect( Rect2D srcRect_ )
8365 {
8366 srcRect = srcRect_;
8367 return *this;
8368 }
8369
8370 DisplayPresentInfoKHR& setDstRect( Rect2D dstRect_ )
8371 {
8372 dstRect = dstRect_;
8373 return *this;
8374 }
8375
8376 DisplayPresentInfoKHR& setPersistent( Bool32 persistent_ )
8377 {
8378 persistent = persistent_;
8379 return *this;
8380 }
8381
8382 operator const VkDisplayPresentInfoKHR&() const
8383 {
8384 return *reinterpret_cast<const VkDisplayPresentInfoKHR*>(this);
8385 }
8386
8387 bool operator==( DisplayPresentInfoKHR const& rhs ) const
8388 {
8389 return ( sType == rhs.sType )
8390 && ( pNext == rhs.pNext )
8391 && ( srcRect == rhs.srcRect )
8392 && ( dstRect == rhs.dstRect )
8393 && ( persistent == rhs.persistent );
8394 }
8395
8396 bool operator!=( DisplayPresentInfoKHR const& rhs ) const
8397 {
8398 return !operator==( rhs );
8399 }
8400
8401 private:
8402 StructureType sType;
8403
8404 public:
8405 const void* pNext;
8406 Rect2D srcRect;
8407 Rect2D dstRect;
8408 Bool32 persistent;
8409 };
8410 static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
8411
8412#ifdef VK_USE_PLATFORM_ANDROID_KHR
8413 struct AndroidSurfaceCreateInfoKHR
8414 {
8415 AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = AndroidSurfaceCreateFlagsKHR(), ANativeWindow* window_ = nullptr )
8416 : sType( StructureType::eAndroidSurfaceCreateInfoKHR )
8417 , pNext( nullptr )
8418 , flags( flags_ )
8419 , window( window_ )
8420 {
8421 }
8422
8423 AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs )
8424 {
8425 memcpy( this, &rhs, sizeof(AndroidSurfaceCreateInfoKHR) );
8426 }
8427
8428 AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs )
8429 {
8430 memcpy( this, &rhs, sizeof(AndroidSurfaceCreateInfoKHR) );
8431 return *this;
8432 }
8433
8434    AndroidSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8435 {
8436 pNext = pNext_;
8437 return *this;
8438 }
8439
8440 AndroidSurfaceCreateInfoKHR& setFlags( AndroidSurfaceCreateFlagsKHR flags_ )
8441 {
8442 flags = flags_;
8443 return *this;
8444 }
8445
8446 AndroidSurfaceCreateInfoKHR& setWindow( ANativeWindow* window_ )
8447 {
8448 window = window_;
8449 return *this;
8450 }
8451
8452 operator const VkAndroidSurfaceCreateInfoKHR&() const
8453 {
8454 return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>(this);
8455 }
8456
8457 bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const
8458 {
8459 return ( sType == rhs.sType )
8460 && ( pNext == rhs.pNext )
8461 && ( flags == rhs.flags )
8462 && ( window == rhs.window );
8463 }
8464
8465 bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const
8466 {
8467 return !operator==( rhs );
8468 }
8469
8470 private:
8471 StructureType sType;
8472
8473 public:
8474 const void* pNext;
8475 AndroidSurfaceCreateFlagsKHR flags;
8476 ANativeWindow* window;
8477 };
8478 static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8479#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
8480
8481#ifdef VK_USE_PLATFORM_MIR_KHR
8482 struct MirSurfaceCreateInfoKHR
8483 {
8484 MirSurfaceCreateInfoKHR( MirSurfaceCreateFlagsKHR flags_ = MirSurfaceCreateFlagsKHR(), MirConnection* connection_ = nullptr, MirSurface* mirSurface_ = nullptr )
8485 : sType( StructureType::eMirSurfaceCreateInfoKHR )
8486 , pNext( nullptr )
8487 , flags( flags_ )
8488 , connection( connection_ )
8489 , mirSurface( mirSurface_ )
8490 {
8491 }
8492
8493 MirSurfaceCreateInfoKHR( VkMirSurfaceCreateInfoKHR const & rhs )
8494 {
8495 memcpy( this, &rhs, sizeof(MirSurfaceCreateInfoKHR) );
8496 }
8497
8498 MirSurfaceCreateInfoKHR& operator=( VkMirSurfaceCreateInfoKHR const & rhs )
8499 {
8500 memcpy( this, &rhs, sizeof(MirSurfaceCreateInfoKHR) );
8501 return *this;
8502 }
8503
8504    MirSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8505 {
8506 pNext = pNext_;
8507 return *this;
8508 }
8509
8510 MirSurfaceCreateInfoKHR& setFlags( MirSurfaceCreateFlagsKHR flags_ )
8511 {
8512 flags = flags_;
8513 return *this;
8514 }
8515
8516 MirSurfaceCreateInfoKHR& setConnection( MirConnection* connection_ )
8517 {
8518 connection = connection_;
8519 return *this;
8520 }
8521
8522 MirSurfaceCreateInfoKHR& setMirSurface( MirSurface* mirSurface_ )
8523 {
8524 mirSurface = mirSurface_;
8525 return *this;
8526 }
8527
8528 operator const VkMirSurfaceCreateInfoKHR&() const
8529 {
8530 return *reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>(this);
8531 }
8532
8533 bool operator==( MirSurfaceCreateInfoKHR const& rhs ) const
8534 {
8535 return ( sType == rhs.sType )
8536 && ( pNext == rhs.pNext )
8537 && ( flags == rhs.flags )
8538 && ( connection == rhs.connection )
8539 && ( mirSurface == rhs.mirSurface );
8540 }
8541
8542 bool operator!=( MirSurfaceCreateInfoKHR const& rhs ) const
8543 {
8544 return !operator==( rhs );
8545 }
8546
8547 private:
8548 StructureType sType;
8549
8550 public:
8551 const void* pNext;
8552 MirSurfaceCreateFlagsKHR flags;
8553 MirConnection* connection;
8554 MirSurface* mirSurface;
8555 };
8556 static_assert( sizeof( MirSurfaceCreateInfoKHR ) == sizeof( VkMirSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8557#endif /*VK_USE_PLATFORM_MIR_KHR*/
8558
8559#ifdef VK_USE_PLATFORM_VI_NN
8560 struct ViSurfaceCreateInfoNN
8561 {
8562 ViSurfaceCreateInfoNN( ViSurfaceCreateFlagsNN flags_ = ViSurfaceCreateFlagsNN(), void* window_ = nullptr )
8563 : sType( StructureType::eViSurfaceCreateInfoNN )
8564 , pNext( nullptr )
8565 , flags( flags_ )
8566 , window( window_ )
8567 {
8568 }
8569
8570 ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs )
8571 {
8572 memcpy( this, &rhs, sizeof(ViSurfaceCreateInfoNN) );
8573 }
8574
8575 ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs )
8576 {
8577 memcpy( this, &rhs, sizeof(ViSurfaceCreateInfoNN) );
8578 return *this;
8579 }
8580
8581    ViSurfaceCreateInfoNN& setPNext( const void* pNext_ )
8582 {
8583 pNext = pNext_;
8584 return *this;
8585 }
8586
8587 ViSurfaceCreateInfoNN& setFlags( ViSurfaceCreateFlagsNN flags_ )
8588 {
8589 flags = flags_;
8590 return *this;
8591 }
8592
8593 ViSurfaceCreateInfoNN& setWindow( void* window_ )
8594 {
8595 window = window_;
8596 return *this;
8597 }
8598
8599 operator const VkViSurfaceCreateInfoNN&() const
8600 {
8601 return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>(this);
8602 }
8603
8604 bool operator==( ViSurfaceCreateInfoNN const& rhs ) const
8605 {
8606 return ( sType == rhs.sType )
8607 && ( pNext == rhs.pNext )
8608 && ( flags == rhs.flags )
8609 && ( window == rhs.window );
8610 }
8611
8612 bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const
8613 {
8614 return !operator==( rhs );
8615 }
8616
8617 private:
8618 StructureType sType;
8619
8620 public:
8621 const void* pNext;
8622 ViSurfaceCreateFlagsNN flags;
8623 void* window;
8624 };
8625 static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
8626#endif /*VK_USE_PLATFORM_VI_NN*/
8627
8628#ifdef VK_USE_PLATFORM_WAYLAND_KHR
8629 struct WaylandSurfaceCreateInfoKHR
8630 {
8631 WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateFlagsKHR flags_ = WaylandSurfaceCreateFlagsKHR(), struct wl_display* display_ = nullptr, struct wl_surface* surface_ = nullptr )
8632 : sType( StructureType::eWaylandSurfaceCreateInfoKHR )
8633 , pNext( nullptr )
8634 , flags( flags_ )
8635 , display( display_ )
8636 , surface( surface_ )
8637 {
8638 }
8639
8640 WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs )
8641 {
8642 memcpy( this, &rhs, sizeof(WaylandSurfaceCreateInfoKHR) );
8643 }
8644
8645 WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs )
8646 {
8647 memcpy( this, &rhs, sizeof(WaylandSurfaceCreateInfoKHR) );
8648 return *this;
8649 }
8650
8651    WaylandSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8652 {
8653 pNext = pNext_;
8654 return *this;
8655 }
8656
8657 WaylandSurfaceCreateInfoKHR& setFlags( WaylandSurfaceCreateFlagsKHR flags_ )
8658 {
8659 flags = flags_;
8660 return *this;
8661 }
8662
8663 WaylandSurfaceCreateInfoKHR& setDisplay( struct wl_display* display_ )
8664 {
8665 display = display_;
8666 return *this;
8667 }
8668
8669 WaylandSurfaceCreateInfoKHR& setSurface( struct wl_surface* surface_ )
8670 {
8671 surface = surface_;
8672 return *this;
8673 }
8674
8675 operator const VkWaylandSurfaceCreateInfoKHR&() const
8676 {
8677 return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>(this);
8678 }
8679
8680 bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const
8681 {
8682 return ( sType == rhs.sType )
8683 && ( pNext == rhs.pNext )
8684 && ( flags == rhs.flags )
8685 && ( display == rhs.display )
8686 && ( surface == rhs.surface );
8687 }
8688
8689 bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const
8690 {
8691 return !operator==( rhs );
8692 }
8693
8694 private:
8695 StructureType sType;
8696
8697 public:
8698 const void* pNext;
8699 WaylandSurfaceCreateFlagsKHR flags;
8700 struct wl_display* display;
8701 struct wl_surface* surface;
8702 };
8703 static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8704#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
8705
8706#ifdef VK_USE_PLATFORM_WIN32_KHR
8707 struct Win32SurfaceCreateInfoKHR
8708 {
8709 Win32SurfaceCreateInfoKHR( Win32SurfaceCreateFlagsKHR flags_ = Win32SurfaceCreateFlagsKHR(), HINSTANCE hinstance_ = 0, HWND hwnd_ = 0 )
8710 : sType( StructureType::eWin32SurfaceCreateInfoKHR )
8711 , pNext( nullptr )
8712 , flags( flags_ )
8713 , hinstance( hinstance_ )
8714 , hwnd( hwnd_ )
8715 {
8716 }
8717
8718 Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs )
8719 {
8720 memcpy( this, &rhs, sizeof(Win32SurfaceCreateInfoKHR) );
8721 }
8722
8723 Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs )
8724 {
8725 memcpy( this, &rhs, sizeof(Win32SurfaceCreateInfoKHR) );
8726 return *this;
8727 }
8728
8729    Win32SurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8730 {
8731 pNext = pNext_;
8732 return *this;
8733 }
8734
8735 Win32SurfaceCreateInfoKHR& setFlags( Win32SurfaceCreateFlagsKHR flags_ )
8736 {
8737 flags = flags_;
8738 return *this;
8739 }
8740
8741 Win32SurfaceCreateInfoKHR& setHinstance( HINSTANCE hinstance_ )
8742 {
8743 hinstance = hinstance_;
8744 return *this;
8745 }
8746
8747 Win32SurfaceCreateInfoKHR& setHwnd( HWND hwnd_ )
8748 {
8749 hwnd = hwnd_;
8750 return *this;
8751 }
8752
8753 operator const VkWin32SurfaceCreateInfoKHR&() const
8754 {
8755 return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>(this);
8756 }
8757
8758 bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const
8759 {
8760 return ( sType == rhs.sType )
8761 && ( pNext == rhs.pNext )
8762 && ( flags == rhs.flags )
8763 && ( hinstance == rhs.hinstance )
8764 && ( hwnd == rhs.hwnd );
8765 }
8766
8767 bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const
8768 {
8769 return !operator==( rhs );
8770 }
8771
8772 private:
8773 StructureType sType;
8774
8775 public:
8776 const void* pNext;
8777 Win32SurfaceCreateFlagsKHR flags;
8778 HINSTANCE hinstance;
8779 HWND hwnd;
8780 };
8781 static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8782#endif /*VK_USE_PLATFORM_WIN32_KHR*/
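  // Usage sketch (illustrative comment only): creating a Win32 surface from existing window
  // handles; "instance", "hInstance" and "hWnd" are assumed to be valid and VK_KHR_win32_surface
  // enabled on the instance.
  //
  //   vk::Win32SurfaceCreateInfoKHR surfaceInfo( vk::Win32SurfaceCreateFlagsKHR(), hInstance, hWnd );
  //   vk::SurfaceKHR surface = instance.createWin32SurfaceKHR( surfaceInfo );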
8783
8784#ifdef VK_USE_PLATFORM_XLIB_KHR
8785 struct XlibSurfaceCreateInfoKHR
8786 {
8787 XlibSurfaceCreateInfoKHR( XlibSurfaceCreateFlagsKHR flags_ = XlibSurfaceCreateFlagsKHR(), Display* dpy_ = nullptr, Window window_ = 0 )
8788 : sType( StructureType::eXlibSurfaceCreateInfoKHR )
8789 , pNext( nullptr )
8790 , flags( flags_ )
8791 , dpy( dpy_ )
8792 , window( window_ )
8793 {
8794 }
8795
8796 XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs )
8797 {
8798 memcpy( this, &rhs, sizeof(XlibSurfaceCreateInfoKHR) );
8799 }
8800
8801 XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs )
8802 {
8803 memcpy( this, &rhs, sizeof(XlibSurfaceCreateInfoKHR) );
8804 return *this;
8805 }
8806
8807    XlibSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8808 {
8809 pNext = pNext_;
8810 return *this;
8811 }
8812
8813 XlibSurfaceCreateInfoKHR& setFlags( XlibSurfaceCreateFlagsKHR flags_ )
8814 {
8815 flags = flags_;
8816 return *this;
8817 }
8818
8819 XlibSurfaceCreateInfoKHR& setDpy( Display* dpy_ )
8820 {
8821 dpy = dpy_;
8822 return *this;
8823 }
8824
8825 XlibSurfaceCreateInfoKHR& setWindow( Window window_ )
8826 {
8827 window = window_;
8828 return *this;
8829 }
8830
8831 operator const VkXlibSurfaceCreateInfoKHR&() const
8832 {
8833 return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>(this);
8834 }
8835
8836 bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const
8837 {
8838 return ( sType == rhs.sType )
8839 && ( pNext == rhs.pNext )
8840 && ( flags == rhs.flags )
8841 && ( dpy == rhs.dpy )
8842 && ( window == rhs.window );
8843 }
8844
8845 bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const
8846 {
8847 return !operator==( rhs );
8848 }
8849
8850 private:
8851 StructureType sType;
8852
8853 public:
8854 const void* pNext;
8855 XlibSurfaceCreateFlagsKHR flags;
8856 Display* dpy;
8857 Window window;
8858 };
8859 static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8860#endif /*VK_USE_PLATFORM_XLIB_KHR*/
8861
8862#ifdef VK_USE_PLATFORM_XCB_KHR
8863 struct XcbSurfaceCreateInfoKHR
8864 {
8865 XcbSurfaceCreateInfoKHR( XcbSurfaceCreateFlagsKHR flags_ = XcbSurfaceCreateFlagsKHR(), xcb_connection_t* connection_ = nullptr, xcb_window_t window_ = 0 )
8866 : sType( StructureType::eXcbSurfaceCreateInfoKHR )
8867 , pNext( nullptr )
8868 , flags( flags_ )
8869 , connection( connection_ )
8870 , window( window_ )
8871 {
8872 }
8873
8874 XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs )
8875 {
8876 memcpy( this, &rhs, sizeof(XcbSurfaceCreateInfoKHR) );
8877 }
8878
8879 XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs )
8880 {
8881 memcpy( this, &rhs, sizeof(XcbSurfaceCreateInfoKHR) );
8882 return *this;
8883 }
8884
8885    XcbSurfaceCreateInfoKHR& setPNext( const void* pNext_ )
8886 {
8887 pNext = pNext_;
8888 return *this;
8889 }
8890
8891 XcbSurfaceCreateInfoKHR& setFlags( XcbSurfaceCreateFlagsKHR flags_ )
8892 {
8893 flags = flags_;
8894 return *this;
8895 }
8896
8897 XcbSurfaceCreateInfoKHR& setConnection( xcb_connection_t* connection_ )
8898 {
8899 connection = connection_;
8900 return *this;
8901 }
8902
8903 XcbSurfaceCreateInfoKHR& setWindow( xcb_window_t window_ )
8904 {
8905 window = window_;
8906 return *this;
8907 }
8908
8909 operator const VkXcbSurfaceCreateInfoKHR&() const
8910 {
8911 return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>(this);
8912 }
8913
8914 bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const
8915 {
8916 return ( sType == rhs.sType )
8917 && ( pNext == rhs.pNext )
8918 && ( flags == rhs.flags )
8919 && ( connection == rhs.connection )
8920 && ( window == rhs.window );
8921 }
8922
8923 bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const
8924 {
8925 return !operator==( rhs );
8926 }
8927
8928 private:
8929 StructureType sType;
8930
8931 public:
8932 const void* pNext;
8933 XcbSurfaceCreateFlagsKHR flags;
8934 xcb_connection_t* connection;
8935 xcb_window_t window;
8936 };
8937 static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
8938#endif /*VK_USE_PLATFORM_XCB_KHR*/
8939
8940 struct DebugMarkerMarkerInfoEXT
8941 {
8942 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = nullptr, std::array<float,4> const& color_ = { { 0, 0, 0, 0 } } )
8943 : sType( StructureType::eDebugMarkerMarkerInfoEXT )
8944 , pNext( nullptr )
8945 , pMarkerName( pMarkerName_ )
8946 {
8947 memcpy( &color, color_.data(), 4 * sizeof( float ) );
8948 }
8949
8950 DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs )
8951 {
8952 memcpy( this, &rhs, sizeof(DebugMarkerMarkerInfoEXT) );
8953 }
8954
8955 DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs )
8956 {
8957 memcpy( this, &rhs, sizeof(DebugMarkerMarkerInfoEXT) );
8958 return *this;
8959 }
8960
8961    DebugMarkerMarkerInfoEXT& setPNext( const void* pNext_ )
8962 {
8963 pNext = pNext_;
8964 return *this;
8965 }
8966
8967 DebugMarkerMarkerInfoEXT& setPMarkerName( const char* pMarkerName_ )
8968 {
8969 pMarkerName = pMarkerName_;
8970 return *this;
8971 }
8972
8973 DebugMarkerMarkerInfoEXT& setColor( std::array<float,4> color_ )
8974 {
8975 memcpy( &color, color_.data(), 4 * sizeof( float ) );
8976 return *this;
8977 }
8978
8979 operator const VkDebugMarkerMarkerInfoEXT&() const
8980 {
8981 return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>(this);
8982 }
8983
8984 bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const
8985 {
8986 return ( sType == rhs.sType )
8987 && ( pNext == rhs.pNext )
8988 && ( pMarkerName == rhs.pMarkerName )
8989 && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
8990 }
8991
8992 bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const
8993 {
8994 return !operator==( rhs );
8995 }
8996
8997 private:
8998 StructureType sType;
8999
9000 public:
9001 const void* pNext;
9002 const char* pMarkerName;
9003 float color[4];
9004 };
9005 static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
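  // Usage sketch (illustrative comment only): labelling a region of a command buffer, assuming
  // VK_EXT_debug_marker was enabled on the device and "cmd" is a valid vk::CommandBuffer.
  //
  //   vk::DebugMarkerMarkerInfoEXT marker( "shadow pass", { { 1.0f, 0.0f, 0.0f, 1.0f } } );
  //   cmd.debugMarkerBeginEXT( marker );
  //   // ... record the marked commands ...
  //   cmd.debugMarkerEndEXT();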
9006
9007 struct DedicatedAllocationImageCreateInfoNV
9008 {
9009 DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
9010 : sType( StructureType::eDedicatedAllocationImageCreateInfoNV )
9011 , pNext( nullptr )
9012 , dedicatedAllocation( dedicatedAllocation_ )
9013 {
9014 }
9015
9016 DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs )
9017 {
9018 memcpy( this, &rhs, sizeof(DedicatedAllocationImageCreateInfoNV) );
9019 }
9020
9021 DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs )
9022 {
9023 memcpy( this, &rhs, sizeof(DedicatedAllocationImageCreateInfoNV) );
9024 return *this;
9025 }
9026
9027    DedicatedAllocationImageCreateInfoNV& setPNext( const void* pNext_ )
9028 {
9029 pNext = pNext_;
9030 return *this;
9031 }
9032
9033 DedicatedAllocationImageCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
9034 {
9035 dedicatedAllocation = dedicatedAllocation_;
9036 return *this;
9037 }
9038
9039 operator const VkDedicatedAllocationImageCreateInfoNV&() const
9040 {
9041 return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>(this);
9042 }
9043
9044 bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const
9045 {
9046 return ( sType == rhs.sType )
9047 && ( pNext == rhs.pNext )
9048 && ( dedicatedAllocation == rhs.dedicatedAllocation );
9049 }
9050
9051 bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const
9052 {
9053 return !operator==( rhs );
9054 }
9055
9056 private:
9057 StructureType sType;
9058
9059 public:
9060 const void* pNext;
9061 Bool32 dedicatedAllocation;
9062 };
9063 static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
9064
9065 struct DedicatedAllocationBufferCreateInfoNV
9066 {
9067 DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = 0 )
9068 : sType( StructureType::eDedicatedAllocationBufferCreateInfoNV )
9069 , pNext( nullptr )
9070 , dedicatedAllocation( dedicatedAllocation_ )
9071 {
9072 }
9073
9074 DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
9075 {
9076 memcpy( this, &rhs, sizeof(DedicatedAllocationBufferCreateInfoNV) );
9077 }
9078
9079 DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs )
9080 {
9081 memcpy( this, &rhs, sizeof(DedicatedAllocationBufferCreateInfoNV) );
9082 return *this;
9083 }
9084
9085    DedicatedAllocationBufferCreateInfoNV& setPNext( const void* pNext_ )
9086 {
9087 pNext = pNext_;
9088 return *this;
9089 }
9090
9091 DedicatedAllocationBufferCreateInfoNV& setDedicatedAllocation( Bool32 dedicatedAllocation_ )
9092 {
9093 dedicatedAllocation = dedicatedAllocation_;
9094 return *this;
9095 }
9096
9097 operator const VkDedicatedAllocationBufferCreateInfoNV&() const
9098 {
9099 return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>(this);
9100 }
9101
9102 bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
9103 {
9104 return ( sType == rhs.sType )
9105 && ( pNext == rhs.pNext )
9106 && ( dedicatedAllocation == rhs.dedicatedAllocation );
9107 }
9108
9109 bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const
9110 {
9111 return !operator==( rhs );
9112 }
9113
9114 private:
9115 StructureType sType;
9116
9117 public:
9118 const void* pNext;
9119 Bool32 dedicatedAllocation;
9120 };
9121 static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
9122
9123 struct DedicatedAllocationMemoryAllocateInfoNV
9124 {
9125 DedicatedAllocationMemoryAllocateInfoNV( Image image_ = Image(), Buffer buffer_ = Buffer() )
9126 : sType( StructureType::eDedicatedAllocationMemoryAllocateInfoNV )
9127 , pNext( nullptr )
9128 , image( image_ )
9129 , buffer( buffer_ )
9130 {
9131 }
9132
9133 DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
9134 {
9135 memcpy( this, &rhs, sizeof(DedicatedAllocationMemoryAllocateInfoNV) );
9136 }
9137
9138 DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs )
9139 {
9140 memcpy( this, &rhs, sizeof(DedicatedAllocationMemoryAllocateInfoNV) );
9141 return *this;
9142 }
9143
9144    DedicatedAllocationMemoryAllocateInfoNV& setPNext( const void* pNext_ )
9145 {
9146 pNext = pNext_;
9147 return *this;
9148 }
9149
9150 DedicatedAllocationMemoryAllocateInfoNV& setImage( Image image_ )
9151 {
9152 image = image_;
9153 return *this;
9154 }
9155
9156 DedicatedAllocationMemoryAllocateInfoNV& setBuffer( Buffer buffer_ )
9157 {
9158 buffer = buffer_;
9159 return *this;
9160 }
9161
9162 operator const VkDedicatedAllocationMemoryAllocateInfoNV&() const
9163 {
9164 return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>(this);
9165 }
9166
9167 bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
9168 {
9169 return ( sType == rhs.sType )
9170 && ( pNext == rhs.pNext )
9171 && ( image == rhs.image )
9172 && ( buffer == rhs.buffer );
9173 }
9174
9175 bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const
9176 {
9177 return !operator==( rhs );
9178 }
9179
9180 private:
9181 StructureType sType;
9182
9183 public:
9184 const void* pNext;
9185 Image image;
9186 Buffer buffer;
9187 };
9188 static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
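
  // Illustrative usage sketch (not generated code): the three NV_dedicated_allocation wrappers above are
  // chained through pNext and then passed to the normal create/allocate calls. Names such as "device",
  // "imageCreateInfo", "memoryRequirements" and "memoryTypeIndex" are assumed to exist in the calling code.
  //
  //   vk::DedicatedAllocationImageCreateInfoNV dedicatedImageInfo( VK_TRUE );
  //   imageCreateInfo.setPNext( &dedicatedImageInfo );
  //   vk::Image image = device.createImage( imageCreateInfo );
  //
  //   vk::MemoryAllocateInfo allocInfo( memoryRequirements.size, memoryTypeIndex );
  //   vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedAllocInfo( image, vk::Buffer() );
  //   allocInfo.setPNext( &dedicatedAllocInfo );
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );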
9189
9190#ifdef VK_USE_PLATFORM_WIN32_KHR
9191 struct ExportMemoryWin32HandleInfoNV
9192 {
9193 ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = nullptr, DWORD dwAccess_ = 0 )
9194 : sType( StructureType::eExportMemoryWin32HandleInfoNV )
9195 , pNext( nullptr )
9196 , pAttributes( pAttributes_ )
9197 , dwAccess( dwAccess_ )
9198 {
9199 }
9200
9201 ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs )
9202 {
9203 memcpy( this, &rhs, sizeof(ExportMemoryWin32HandleInfoNV) );
9204 }
9205
9206 ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs )
9207 {
9208 memcpy( this, &rhs, sizeof(ExportMemoryWin32HandleInfoNV) );
9209 return *this;
9210 }
9211
9212 ExportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
9213 {
9214 pNext = pNext_;
9215 return *this;
9216 }
9217
9218 ExportMemoryWin32HandleInfoNV& setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ )
9219 {
9220 pAttributes = pAttributes_;
9221 return *this;
9222 }
9223
9224 ExportMemoryWin32HandleInfoNV& setDwAccess( DWORD dwAccess_ )
9225 {
9226 dwAccess = dwAccess_;
9227 return *this;
9228 }
9229
9230 operator const VkExportMemoryWin32HandleInfoNV&() const
9231 {
9232 return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>(this);
9233 }
9234
9235 bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const
9236 {
9237 return ( sType == rhs.sType )
9238 && ( pNext == rhs.pNext )
9239 && ( pAttributes == rhs.pAttributes )
9240 && ( dwAccess == rhs.dwAccess );
9241 }
9242
9243 bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const
9244 {
9245 return !operator==( rhs );
9246 }
9247
9248 private:
9249 StructureType sType;
9250
9251 public:
9252 const void* pNext;
9253 const SECURITY_ATTRIBUTES* pAttributes;
9254 DWORD dwAccess;
9255 };
9256 static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
9257#endif /*VK_USE_PLATFORM_WIN32_KHR*/
9258
9259#ifdef VK_USE_PLATFORM_WIN32_KHR
9260 struct Win32KeyedMutexAcquireReleaseInfoNV
9261 {
9262 Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = 0, const DeviceMemory* pAcquireSyncs_ = nullptr, const uint64_t* pAcquireKeys_ = nullptr, const uint32_t* pAcquireTimeoutMilliseconds_ = nullptr, uint32_t releaseCount_ = 0, const DeviceMemory* pReleaseSyncs_ = nullptr, const uint64_t* pReleaseKeys_ = nullptr )
9263 : sType( StructureType::eWin32KeyedMutexAcquireReleaseInfoNV )
9264 , pNext( nullptr )
9265 , acquireCount( acquireCount_ )
9266 , pAcquireSyncs( pAcquireSyncs_ )
9267 , pAcquireKeys( pAcquireKeys_ )
9268 , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
9269 , releaseCount( releaseCount_ )
9270 , pReleaseSyncs( pReleaseSyncs_ )
9271 , pReleaseKeys( pReleaseKeys_ )
9272 {
9273 }
9274
9275 Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
9276 {
9277 memcpy( this, &rhs, sizeof(Win32KeyedMutexAcquireReleaseInfoNV) );
9278 }
9279
9280 Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs )
9281 {
9282 memcpy( this, &rhs, sizeof(Win32KeyedMutexAcquireReleaseInfoNV) );
9283 return *this;
9284 }
9285
9286 Win32KeyedMutexAcquireReleaseInfoNV& setPNext( const void* pNext_ )
9287 {
9288 pNext = pNext_;
9289 return *this;
9290 }
9291
9292 Win32KeyedMutexAcquireReleaseInfoNV& setAcquireCount( uint32_t acquireCount_ )
9293 {
9294 acquireCount = acquireCount_;
9295 return *this;
9296 }
9297
9298 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireSyncs( const DeviceMemory* pAcquireSyncs_ )
9299 {
9300 pAcquireSyncs = pAcquireSyncs_;
9301 return *this;
9302 }
9303
9304 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireKeys( const uint64_t* pAcquireKeys_ )
9305 {
9306 pAcquireKeys = pAcquireKeys_;
9307 return *this;
9308 }
9309
9310 Win32KeyedMutexAcquireReleaseInfoNV& setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ )
9311 {
9312 pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
9313 return *this;
9314 }
9315
9316 Win32KeyedMutexAcquireReleaseInfoNV& setReleaseCount( uint32_t releaseCount_ )
9317 {
9318 releaseCount = releaseCount_;
9319 return *this;
9320 }
9321
9322 Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseSyncs( const DeviceMemory* pReleaseSyncs_ )
9323 {
9324 pReleaseSyncs = pReleaseSyncs_;
9325 return *this;
9326 }
9327
9328 Win32KeyedMutexAcquireReleaseInfoNV& setPReleaseKeys( const uint64_t* pReleaseKeys_ )
9329 {
9330 pReleaseKeys = pReleaseKeys_;
9331 return *this;
9332 }
9333
9334 operator const VkWin32KeyedMutexAcquireReleaseInfoNV&() const
9335 {
9336 return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>(this);
9337 }
9338
9339 bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
9340 {
9341 return ( sType == rhs.sType )
9342 && ( pNext == rhs.pNext )
9343 && ( acquireCount == rhs.acquireCount )
9344 && ( pAcquireSyncs == rhs.pAcquireSyncs )
9345 && ( pAcquireKeys == rhs.pAcquireKeys )
9346 && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
9347 && ( releaseCount == rhs.releaseCount )
9348 && ( pReleaseSyncs == rhs.pReleaseSyncs )
9349 && ( pReleaseKeys == rhs.pReleaseKeys );
9350 }
9351
9352 bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const
9353 {
9354 return !operator==( rhs );
9355 }
9356
9357 private:
9358 StructureType sType;
9359
9360 public:
9361 const void* pNext;
9362 uint32_t acquireCount;
9363 const DeviceMemory* pAcquireSyncs;
9364 const uint64_t* pAcquireKeys;
9365 const uint32_t* pAcquireTimeoutMilliseconds;
9366 uint32_t releaseCount;
9367 const DeviceMemory* pReleaseSyncs;
9368 const uint64_t* pReleaseKeys;
9369 };
9370 static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
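
  // Illustrative sketch (Win32 only, not generated code): Win32KeyedMutexAcquireReleaseInfoNV is chained
  // into a SubmitInfo via pNext before queue submission. "submitInfo", "acquireSync", "acquireKey" and
  // "timeoutMs" are assumed to exist in the calling code.
  //
  //   vk::Win32KeyedMutexAcquireReleaseInfoNV keyedMutexInfo;
  //   keyedMutexInfo.setAcquireCount( 1 )
  //                 .setPAcquireSyncs( &acquireSync )
  //                 .setPAcquireKeys( &acquireKey )
  //                 .setPAcquireTimeoutMilliseconds( &timeoutMs );
  //   submitInfo.setPNext( &keyedMutexInfo );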
9371#endif /*VK_USE_PLATFORM_WIN32_KHR*/
9372
9373 struct DeviceGeneratedCommandsFeaturesNVX
9374 {
9375 DeviceGeneratedCommandsFeaturesNVX( Bool32 computeBindingPointSupport_ = 0 )
9376 : sType( StructureType::eDeviceGeneratedCommandsFeaturesNVX )
9377 , pNext( nullptr )
9378 , computeBindingPointSupport( computeBindingPointSupport_ )
9379 {
9380 }
9381
9382 DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
9383 {
9384 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
9385 }
9386
9387 DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs )
9388 {
9389 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsFeaturesNVX) );
9390 return *this;
9391 }
9392
9393 DeviceGeneratedCommandsFeaturesNVX& setPNext( const void* pNext_ )
9394 {
9395 pNext = pNext_;
9396 return *this;
9397 }
9398
9399 DeviceGeneratedCommandsFeaturesNVX& setComputeBindingPointSupport( Bool32 computeBindingPointSupport_ )
9400 {
9401 computeBindingPointSupport = computeBindingPointSupport_;
9402 return *this;
9403 }
9404
9405 operator const VkDeviceGeneratedCommandsFeaturesNVX&() const
9406 {
9407 return *reinterpret_cast<const VkDeviceGeneratedCommandsFeaturesNVX*>(this);
9408 }
9409
9410 bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
9411 {
9412 return ( sType == rhs.sType )
9413 && ( pNext == rhs.pNext )
9414 && ( computeBindingPointSupport == rhs.computeBindingPointSupport );
9415 }
9416
9417 bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const
9418 {
9419 return !operator==( rhs );
9420 }
9421
9422 private:
9423 StructureType sType;
9424
9425 public:
9426 const void* pNext;
9427 Bool32 computeBindingPointSupport;
9428 };
9429 static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" );
9430
9431 struct DeviceGeneratedCommandsLimitsNVX
9432 {
9433 DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = 0, uint32_t maxObjectEntryCounts_ = 0, uint32_t minSequenceCountBufferOffsetAlignment_ = 0, uint32_t minSequenceIndexBufferOffsetAlignment_ = 0, uint32_t minCommandsTokenBufferOffsetAlignment_ = 0 )
9434 : sType( StructureType::eDeviceGeneratedCommandsLimitsNVX )
9435 , pNext( nullptr )
9436 , maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ )
9437 , maxObjectEntryCounts( maxObjectEntryCounts_ )
9438 , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ )
9439 , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ )
9440 , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ )
9441 {
9442 }
9443
9444 DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
9445 {
9446 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
9447 }
9448
9449 DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs )
9450 {
9451 memcpy( this, &rhs, sizeof(DeviceGeneratedCommandsLimitsNVX) );
9452 return *this;
9453 }
9454
9455 DeviceGeneratedCommandsLimitsNVX& setPNext( const void* pNext_ )
9456 {
9457 pNext = pNext_;
9458 return *this;
9459 }
9460
9461 DeviceGeneratedCommandsLimitsNVX& setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ )
9462 {
9463 maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_;
9464 return *this;
9465 }
9466
9467 DeviceGeneratedCommandsLimitsNVX& setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ )
9468 {
9469 maxObjectEntryCounts = maxObjectEntryCounts_;
9470 return *this;
9471 }
9472
9473 DeviceGeneratedCommandsLimitsNVX& setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ )
9474 {
9475 minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_;
9476 return *this;
9477 }
9478
9479 DeviceGeneratedCommandsLimitsNVX& setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ )
9480 {
9481 minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_;
9482 return *this;
9483 }
9484
9485 DeviceGeneratedCommandsLimitsNVX& setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ )
9486 {
9487 minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_;
9488 return *this;
9489 }
9490
9491 operator const VkDeviceGeneratedCommandsLimitsNVX&() const
9492 {
9493 return *reinterpret_cast<const VkDeviceGeneratedCommandsLimitsNVX*>(this);
9494 }
9495
9496 bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
9497 {
9498 return ( sType == rhs.sType )
9499 && ( pNext == rhs.pNext )
9500 && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount )
9501 && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts )
9502 && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment )
9503 && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment )
9504 && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment );
9505 }
9506
9507 bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const
9508 {
9509 return !operator==( rhs );
9510 }
9511
9512 private:
9513 StructureType sType;
9514
9515 public:
9516 const void* pNext;
9517 uint32_t maxIndirectCommandsLayoutTokenCount;
9518 uint32_t maxObjectEntryCounts;
9519 uint32_t minSequenceCountBufferOffsetAlignment;
9520 uint32_t minSequenceIndexBufferOffsetAlignment;
9521 uint32_t minCommandsTokenBufferOffsetAlignment;
9522 };
9523 static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" );
9524
9525 struct CmdReserveSpaceForCommandsInfoNVX
9526 {
9527 CmdReserveSpaceForCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t maxSequencesCount_ = 0 )
9528 : sType( StructureType::eCmdReserveSpaceForCommandsInfoNVX )
9529 , pNext( nullptr )
9530 , objectTable( objectTable_ )
9531 , indirectCommandsLayout( indirectCommandsLayout_ )
9532 , maxSequencesCount( maxSequencesCount_ )
9533 {
9534 }
9535
9536 CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
9537 {
9538 memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
9539 }
9540
9541 CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs )
9542 {
9543 memcpy( this, &rhs, sizeof(CmdReserveSpaceForCommandsInfoNVX) );
9544 return *this;
9545 }
9546
9547 CmdReserveSpaceForCommandsInfoNVX& setPNext( const void* pNext_ )
9548 {
9549 pNext = pNext_;
9550 return *this;
9551 }
9552
9553 CmdReserveSpaceForCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
9554 {
9555 objectTable = objectTable_;
9556 return *this;
9557 }
9558
9559 CmdReserveSpaceForCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
9560 {
9561 indirectCommandsLayout = indirectCommandsLayout_;
9562 return *this;
9563 }
9564
9565 CmdReserveSpaceForCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
9566 {
9567 maxSequencesCount = maxSequencesCount_;
9568 return *this;
9569 }
9570
9571 operator const VkCmdReserveSpaceForCommandsInfoNVX&() const
9572 {
9573 return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>(this);
9574 }
9575
9576 bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
9577 {
9578 return ( sType == rhs.sType )
9579 && ( pNext == rhs.pNext )
9580 && ( objectTable == rhs.objectTable )
9581 && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
9582 && ( maxSequencesCount == rhs.maxSequencesCount );
9583 }
9584
9585 bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const
9586 {
9587 return !operator==( rhs );
9588 }
9589
9590 private:
9591 StructureType sType;
9592
9593 public:
9594 const void* pNext;
9595 ObjectTableNVX objectTable;
9596 IndirectCommandsLayoutNVX indirectCommandsLayout;
9597 uint32_t maxSequencesCount;
9598 };
9599 static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
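
  // Illustrative sketch (not generated code): CmdReserveSpaceForCommandsInfoNVX is filled with the fluent
  // setters above and recorded into a secondary command buffer, assuming the NVX device-generated-commands
  // command wrappers are available. "commandBuffer", "objectTable" and "indirectCommandsLayout" are
  // assumed to exist in the calling code.
  //
  //   vk::CmdReserveSpaceForCommandsInfoNVX reserveInfo;
  //   reserveInfo.setObjectTable( objectTable )
  //              .setIndirectCommandsLayout( indirectCommandsLayout )
  //              .setMaxSequencesCount( 64 );
  //   commandBuffer.reserveSpaceForCommandsNVX( reserveInfo );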
9600
9601 struct PhysicalDeviceFeatures2KHR
9602 {
9603 PhysicalDeviceFeatures2KHR( PhysicalDeviceFeatures features_ = PhysicalDeviceFeatures() )
9604 : sType( StructureType::ePhysicalDeviceFeatures2KHR )
9605 , pNext( nullptr )
9606 , features( features_ )
9607 {
9608 }
9609
9610 PhysicalDeviceFeatures2KHR( VkPhysicalDeviceFeatures2KHR const & rhs )
9611 {
9612 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures2KHR) );
9613 }
9614
9615 PhysicalDeviceFeatures2KHR& operator=( VkPhysicalDeviceFeatures2KHR const & rhs )
9616 {
9617 memcpy( this, &rhs, sizeof(PhysicalDeviceFeatures2KHR) );
9618 return *this;
9619 }
9620
9621 PhysicalDeviceFeatures2KHR& setPNext( void* pNext_ )
9622 {
9623 pNext = pNext_;
9624 return *this;
9625 }
9626
9627 PhysicalDeviceFeatures2KHR& setFeatures( PhysicalDeviceFeatures features_ )
9628 {
9629 features = features_;
9630 return *this;
9631 }
9632
9633 operator const VkPhysicalDeviceFeatures2KHR&() const
9634 {
9635 return *reinterpret_cast<const VkPhysicalDeviceFeatures2KHR*>(this);
9636 }
9637
9638 bool operator==( PhysicalDeviceFeatures2KHR const& rhs ) const
9639 {
9640 return ( sType == rhs.sType )
9641 && ( pNext == rhs.pNext )
9642 && ( features == rhs.features );
9643 }
9644
9645 bool operator!=( PhysicalDeviceFeatures2KHR const& rhs ) const
9646 {
9647 return !operator==( rhs );
9648 }
9649
9650 private:
9651 StructureType sType;
9652
9653 public:
9654 void* pNext;
9655 PhysicalDeviceFeatures features;
9656 };
9657 static_assert( sizeof( PhysicalDeviceFeatures2KHR ) == sizeof( VkPhysicalDeviceFeatures2KHR ), "struct and wrapper have different size!" );
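
  // Illustrative sketch (not generated code): PhysicalDeviceFeatures2KHR is typically filled by a query,
  // assuming the VK_KHR_get_physical_device_properties2 entry points are available. "physicalDevice" is
  // assumed to exist in the calling code.
  //
  //   vk::PhysicalDeviceFeatures2KHR features2 = physicalDevice.getFeatures2KHR();
  //   if ( features2.features.samplerAnisotropy )
  //   {
  //     // anisotropic filtering can be enabled when creating samplers
  //   }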
9658
9659 enum class SubpassContents
9660 {
9661 eInline = VK_SUBPASS_CONTENTS_INLINE,
9662 eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
9663 };
9664
9665 struct PresentInfoKHR
9666 {
9667 PresentInfoKHR( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t swapchainCount_ = 0, const SwapchainKHR* pSwapchains_ = nullptr, const uint32_t* pImageIndices_ = nullptr, Result* pResults_ = nullptr )
9668 : sType( StructureType::ePresentInfoKHR )
9669 , pNext( nullptr )
9670 , waitSemaphoreCount( waitSemaphoreCount_ )
9671 , pWaitSemaphores( pWaitSemaphores_ )
9672 , swapchainCount( swapchainCount_ )
9673 , pSwapchains( pSwapchains_ )
9674 , pImageIndices( pImageIndices_ )
9675 , pResults( pResults_ )
9676 {
9677 }
9678
9679 PresentInfoKHR( VkPresentInfoKHR const & rhs )
9680 {
9681 memcpy( this, &rhs, sizeof(PresentInfoKHR) );
9682 }
9683
9684 PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs )
9685 {
9686 memcpy( this, &rhs, sizeof(PresentInfoKHR) );
9687 return *this;
9688 }
9689
9690 PresentInfoKHR& setPNext( const void* pNext_ )
9691 {
9692 pNext = pNext_;
9693 return *this;
9694 }
9695
9696 PresentInfoKHR& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
9697 {
9698 waitSemaphoreCount = waitSemaphoreCount_;
9699 return *this;
9700 }
9701
9702 PresentInfoKHR& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
9703 {
9704 pWaitSemaphores = pWaitSemaphores_;
9705 return *this;
9706 }
9707
9708 PresentInfoKHR& setSwapchainCount( uint32_t swapchainCount_ )
9709 {
9710 swapchainCount = swapchainCount_;
9711 return *this;
9712 }
9713
9714 PresentInfoKHR& setPSwapchains( const SwapchainKHR* pSwapchains_ )
9715 {
9716 pSwapchains = pSwapchains_;
9717 return *this;
9718 }
9719
9720 PresentInfoKHR& setPImageIndices( const uint32_t* pImageIndices_ )
9721 {
9722 pImageIndices = pImageIndices_;
9723 return *this;
9724 }
9725
9726 PresentInfoKHR& setPResults( Result* pResults_ )
9727 {
9728 pResults = pResults_;
9729 return *this;
9730 }
9731
9732 operator const VkPresentInfoKHR&() const
9733 {
9734 return *reinterpret_cast<const VkPresentInfoKHR*>(this);
9735 }
9736
9737 bool operator==( PresentInfoKHR const& rhs ) const
9738 {
9739 return ( sType == rhs.sType )
9740 && ( pNext == rhs.pNext )
9741 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
9742 && ( pWaitSemaphores == rhs.pWaitSemaphores )
9743 && ( swapchainCount == rhs.swapchainCount )
9744 && ( pSwapchains == rhs.pSwapchains )
9745 && ( pImageIndices == rhs.pImageIndices )
9746 && ( pResults == rhs.pResults );
9747 }
9748
9749 bool operator!=( PresentInfoKHR const& rhs ) const
9750 {
9751 return !operator==( rhs );
9752 }
9753
9754 private:
9755 StructureType sType;
9756
9757 public:
9758 const void* pNext;
9759 uint32_t waitSemaphoreCount;
9760 const Semaphore* pWaitSemaphores;
9761 uint32_t swapchainCount;
9762 const SwapchainKHR* pSwapchains;
9763 const uint32_t* pImageIndices;
9764 Result* pResults;
9765 };
9766 static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
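
  // Illustrative sketch (not generated code): the setters return *this, so a PresentInfoKHR can be filled
  // fluently before presentation. "renderFinished", "swapchain", "imageIndex" and "queue" are assumed to
  // exist in the calling code.
  //
  //   vk::PresentInfoKHR presentInfo;
  //   presentInfo.setWaitSemaphoreCount( 1 )
  //              .setPWaitSemaphores( &renderFinished )
  //              .setSwapchainCount( 1 )
  //              .setPSwapchains( &swapchain )
  //              .setPImageIndices( &imageIndex );
  //   queue.presentKHR( presentInfo );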
9767
9768 enum class DynamicState
9769 {
9770 eViewport = VK_DYNAMIC_STATE_VIEWPORT,
9771 eScissor = VK_DYNAMIC_STATE_SCISSOR,
9772 eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
9773 eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
9774 eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
9775 eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
9776 eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
9777 eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
9778 eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE
9779 };
9780
9781 struct PipelineDynamicStateCreateInfo
9782 {
9783 PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_ = PipelineDynamicStateCreateFlags(), uint32_t dynamicStateCount_ = 0, const DynamicState* pDynamicStates_ = nullptr )
9784 : sType( StructureType::ePipelineDynamicStateCreateInfo )
9785 , pNext( nullptr )
9786 , flags( flags_ )
9787 , dynamicStateCount( dynamicStateCount_ )
9788 , pDynamicStates( pDynamicStates_ )
9789 {
9790 }
9791
9792 PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs )
9793 {
9794 memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
9795 }
9796
9797 PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs )
9798 {
9799 memcpy( this, &rhs, sizeof(PipelineDynamicStateCreateInfo) );
9800 return *this;
9801 }
9802
9803 PipelineDynamicStateCreateInfo& setPNext( const void* pNext_ )
9804 {
9805 pNext = pNext_;
9806 return *this;
9807 }
9808
9809 PipelineDynamicStateCreateInfo& setFlags( PipelineDynamicStateCreateFlags flags_ )
9810 {
9811 flags = flags_;
9812 return *this;
9813 }
9814
9815 PipelineDynamicStateCreateInfo& setDynamicStateCount( uint32_t dynamicStateCount_ )
9816 {
9817 dynamicStateCount = dynamicStateCount_;
9818 return *this;
9819 }
9820
9821 PipelineDynamicStateCreateInfo& setPDynamicStates( const DynamicState* pDynamicStates_ )
9822 {
9823 pDynamicStates = pDynamicStates_;
9824 return *this;
9825 }
9826
9827 operator const VkPipelineDynamicStateCreateInfo&() const
9828 {
9829 return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>(this);
9830 }
9831
9832 bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const
9833 {
9834 return ( sType == rhs.sType )
9835 && ( pNext == rhs.pNext )
9836 && ( flags == rhs.flags )
9837 && ( dynamicStateCount == rhs.dynamicStateCount )
9838 && ( pDynamicStates == rhs.pDynamicStates );
9839 }
9840
9841 bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const
9842 {
9843 return !operator==( rhs );
9844 }
9845
9846 private:
9847 StructureType sType;
9848
9849 public:
9850 const void* pNext;
9851 PipelineDynamicStateCreateFlags flags;
9852 uint32_t dynamicStateCount;
9853 const DynamicState* pDynamicStates;
9854 };
9855 static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
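
  // Illustrative sketch (not generated code): the dynamic-state array is referenced by pointer and count,
  // so it must stay alive until pipeline creation. "graphicsPipelineCreateInfo" is assumed to exist in the
  // calling code.
  //
  //   std::array<vk::DynamicState, 2> dynamicStates = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
  //   vk::PipelineDynamicStateCreateInfo dynamicState;
  //   dynamicState.setDynamicStateCount( static_cast<uint32_t>( dynamicStates.size() ) )
  //               .setPDynamicStates( dynamicStates.data() );
  //   graphicsPipelineCreateInfo.setPDynamicState( &dynamicState );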
9856
9857 enum class QueueFlagBits
9858 {
9859 eGraphics = VK_QUEUE_GRAPHICS_BIT,
9860 eCompute = VK_QUEUE_COMPUTE_BIT,
9861 eTransfer = VK_QUEUE_TRANSFER_BIT,
9862 eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT
9863 };
9864
9865 using QueueFlags = Flags<QueueFlagBits, VkQueueFlags>;
9866
9867 VULKAN_HPP_INLINE QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 )
9868 {
9869 return QueueFlags( bit0 ) | bit1;
9870 }
9871
9872 VULKAN_HPP_INLINE QueueFlags operator~( QueueFlagBits bits )
9873 {
9874 return ~( QueueFlags( bits ) );
9875 }
9876
9877 template <> struct FlagTraits<QueueFlagBits>
9878 {
9879 enum
9880 {
9881 allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding)
9882 };
9883 };
9884
9885 struct QueueFamilyProperties
9886 {
9887 operator const VkQueueFamilyProperties&() const
9888 {
9889 return *reinterpret_cast<const VkQueueFamilyProperties*>(this);
9890 }
9891
9892 bool operator==( QueueFamilyProperties const& rhs ) const
9893 {
9894 return ( queueFlags == rhs.queueFlags )
9895 && ( queueCount == rhs.queueCount )
9896 && ( timestampValidBits == rhs.timestampValidBits )
9897 && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
9898 }
9899
9900 bool operator!=( QueueFamilyProperties const& rhs ) const
9901 {
9902 return !operator==( rhs );
9903 }
9904
9905 QueueFlags queueFlags;
9906 uint32_t queueCount;
9907 uint32_t timestampValidBits;
9908 Extent3D minImageTransferGranularity;
9909 };
9910 static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
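
  // Illustrative sketch (not generated code): QueueFlags supports the bitwise operators defined above, so
  // a queue family with graphics support can be picked like this. "physicalDevice" is assumed to exist.
  //
  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  //   uint32_t graphicsFamily = 0;
  //   for ( uint32_t i = 0; i < families.size(); ++i )
  //   {
  //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       graphicsFamily = i;
  //       break;
  //     }
  //   }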
9911
9912 struct QueueFamilyProperties2KHR
9913 {
9914 operator const VkQueueFamilyProperties2KHR&() const
9915 {
9916 return *reinterpret_cast<const VkQueueFamilyProperties2KHR*>(this);
9917 }
9918
9919 bool operator==( QueueFamilyProperties2KHR const& rhs ) const
9920 {
9921 return ( sType == rhs.sType )
9922 && ( pNext == rhs.pNext )
9923 && ( queueFamilyProperties == rhs.queueFamilyProperties );
9924 }
9925
9926 bool operator!=( QueueFamilyProperties2KHR const& rhs ) const
9927 {
9928 return !operator==( rhs );
9929 }
9930
9931 private:
9932 StructureType sType;
9933
9934 public:
9935 void* pNext;
9936 QueueFamilyProperties queueFamilyProperties;
9937 };
9938 static_assert( sizeof( QueueFamilyProperties2KHR ) == sizeof( VkQueueFamilyProperties2KHR ), "struct and wrapper have different size!" );
9939
9940 enum class MemoryPropertyFlagBits
9941 {
9942 eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
9943 eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
9944 eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
9945 eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
9946 eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT
9947 };
9948
9949 using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits, VkMemoryPropertyFlags>;
9950
9951 VULKAN_HPP_INLINE MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 )
9952 {
9953 return MemoryPropertyFlags( bit0 ) | bit1;
9954 }
9955
9956 VULKAN_HPP_INLINE MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits )
9957 {
9958 return ~( MemoryPropertyFlags( bits ) );
9959 }
9960
9961 template <> struct FlagTraits<MemoryPropertyFlagBits>
9962 {
9963 enum
9964 {
9965 allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated)
9966 };
9967 };
9968
9969 struct MemoryType
9970 {
9971 operator const VkMemoryType&() const
9972 {
9973 return *reinterpret_cast<const VkMemoryType*>(this);
9974 }
9975
9976 bool operator==( MemoryType const& rhs ) const
9977 {
9978 return ( propertyFlags == rhs.propertyFlags )
9979 && ( heapIndex == rhs.heapIndex );
9980 }
9981
9982 bool operator!=( MemoryType const& rhs ) const
9983 {
9984 return !operator==( rhs );
9985 }
9986
9987 MemoryPropertyFlags propertyFlags;
9988 uint32_t heapIndex;
9989 };
9990 static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
9991
9992 enum class MemoryHeapFlagBits
9993 {
9994 eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT
9995 };
9996
9997 using MemoryHeapFlags = Flags<MemoryHeapFlagBits, VkMemoryHeapFlags>;
9998
9999 VULKAN_HPP_INLINE MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 )
10000 {
10001 return MemoryHeapFlags( bit0 ) | bit1;
10002 }
10003
10004 VULKAN_HPP_INLINE MemoryHeapFlags operator~( MemoryHeapFlagBits bits )
10005 {
10006 return ~( MemoryHeapFlags( bits ) );
10007 }
10008
10009 template <> struct FlagTraits<MemoryHeapFlagBits>
10010 {
10011 enum
10012 {
10013 allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal)
10014 };
10015 };
10016
10017 struct MemoryHeap
10018 {
10019 operator const VkMemoryHeap&() const
10020 {
10021 return *reinterpret_cast<const VkMemoryHeap*>(this);
10022 }
10023
10024 bool operator==( MemoryHeap const& rhs ) const
10025 {
10026 return ( size == rhs.size )
10027 && ( flags == rhs.flags );
10028 }
10029
10030 bool operator!=( MemoryHeap const& rhs ) const
10031 {
10032 return !operator==( rhs );
10033 }
10034
10035 DeviceSize size;
10036 MemoryHeapFlags flags;
10037 };
10038 static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
10039
10040 struct PhysicalDeviceMemoryProperties
10041 {
10042 operator const VkPhysicalDeviceMemoryProperties&() const
10043 {
10044 return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>(this);
10045 }
10046
10047 bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const
10048 {
10049 return ( memoryTypeCount == rhs.memoryTypeCount )
10050 && ( memcmp( memoryTypes, rhs.memoryTypes, VK_MAX_MEMORY_TYPES * sizeof( MemoryType ) ) == 0 )
10051 && ( memoryHeapCount == rhs.memoryHeapCount )
10052 && ( memcmp( memoryHeaps, rhs.memoryHeaps, VK_MAX_MEMORY_HEAPS * sizeof( MemoryHeap ) ) == 0 );
10053 }
10054
10055 bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const
10056 {
10057 return !operator==( rhs );
10058 }
10059
10060 uint32_t memoryTypeCount;
10061 MemoryType memoryTypes[VK_MAX_MEMORY_TYPES];
10062 uint32_t memoryHeapCount;
10063 MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS];
10064 };
10065 static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
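
  // Illustrative sketch (not generated code): a memory type index is usually chosen by combining the type
  // filter from MemoryRequirements with the MemoryPropertyFlags defined above. "physicalDevice" and
  // "memoryRequirements" are assumed to exist in the calling code.
  //
  //   vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
  //   vk::MemoryPropertyFlags wanted = vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent;
  //   uint32_t memoryTypeIndex = 0;
  //   for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
  //   {
  //     if ( ( memoryRequirements.memoryTypeBits & ( 1u << i ) ) &&
  //          ( ( memProps.memoryTypes[i].propertyFlags & wanted ) == wanted ) )
  //     {
  //       memoryTypeIndex = i;
  //       break;
  //     }
  //   }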
10066
10067 struct PhysicalDeviceMemoryProperties2KHR
10068 {
10069 operator const VkPhysicalDeviceMemoryProperties2KHR&() const
10070 {
10071 return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2KHR*>(this);
10072 }
10073
10074 bool operator==( PhysicalDeviceMemoryProperties2KHR const& rhs ) const
10075 {
10076 return ( sType == rhs.sType )
10077 && ( pNext == rhs.pNext )
10078 && ( memoryProperties == rhs.memoryProperties );
10079 }
10080
10081 bool operator!=( PhysicalDeviceMemoryProperties2KHR const& rhs ) const
10082 {
10083 return !operator==( rhs );
10084 }
10085
10086 private:
10087 StructureType sType;
10088
10089 public:
10090 void* pNext;
10091 PhysicalDeviceMemoryProperties memoryProperties;
10092 };
10093 static_assert( sizeof( PhysicalDeviceMemoryProperties2KHR ) == sizeof( VkPhysicalDeviceMemoryProperties2KHR ), "struct and wrapper have different size!" );
10094
10095 enum class AccessFlagBits
10096 {
10097 eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
10098 eIndexRead = VK_ACCESS_INDEX_READ_BIT,
10099 eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
10100 eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
10101 eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
10102 eShaderRead = VK_ACCESS_SHADER_READ_BIT,
10103 eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
10104 eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
10105 eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
10106 eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
10107 eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
10108 eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
10109 eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
10110 eHostRead = VK_ACCESS_HOST_READ_BIT,
10111 eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
10112 eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
10113 eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
10114 eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX,
10115 eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX
10116 };
10117
10118 using AccessFlags = Flags<AccessFlagBits, VkAccessFlags>;
10119
10120 VULKAN_HPP_INLINE AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 )
10121 {
10122 return AccessFlags( bit0 ) | bit1;
10123 }
10124
10125 VULKAN_HPP_INLINE AccessFlags operator~( AccessFlagBits bits )
10126 {
10127 return ~( AccessFlags( bits ) );
10128 }
10129
10130 template <> struct FlagTraits<AccessFlagBits>
10131 {
10132 enum
10133 {
10134 allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX)
10135 };
10136 };
10137
10138 struct MemoryBarrier
10139 {
10140 MemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags() )
10141 : sType( StructureType::eMemoryBarrier )
10142 , pNext( nullptr )
10143 , srcAccessMask( srcAccessMask_ )
10144 , dstAccessMask( dstAccessMask_ )
10145 {
10146 }
10147
10148 MemoryBarrier( VkMemoryBarrier const & rhs )
10149 {
10150 memcpy( this, &rhs, sizeof(MemoryBarrier) );
10151 }
10152
10153 MemoryBarrier& operator=( VkMemoryBarrier const & rhs )
10154 {
10155 memcpy( this, &rhs, sizeof(MemoryBarrier) );
10156 return *this;
10157 }
10158
10159 MemoryBarrier& setPNext( const void* pNext_ )
10160 {
10161 pNext = pNext_;
10162 return *this;
10163 }
10164
10165 MemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
10166 {
10167 srcAccessMask = srcAccessMask_;
10168 return *this;
10169 }
10170
10171 MemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
10172 {
10173 dstAccessMask = dstAccessMask_;
10174 return *this;
10175 }
10176
10177 operator const VkMemoryBarrier&() const
10178 {
10179 return *reinterpret_cast<const VkMemoryBarrier*>(this);
10180 }
10181
10182 bool operator==( MemoryBarrier const& rhs ) const
10183 {
10184 return ( sType == rhs.sType )
10185 && ( pNext == rhs.pNext )
10186 && ( srcAccessMask == rhs.srcAccessMask )
10187 && ( dstAccessMask == rhs.dstAccessMask );
10188 }
10189
10190 bool operator!=( MemoryBarrier const& rhs ) const
10191 {
10192 return !operator==( rhs );
10193 }
10194
10195 private:
10196 StructureType sType;
10197
10198 public:
10199 const void* pNext;
10200 AccessFlags srcAccessMask;
10201 AccessFlags dstAccessMask;
10202 };
10203 static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
10204
10205 struct BufferMemoryBarrier
10206 {
10207 BufferMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0, DeviceSize size_ = 0 )
10208 : sType( StructureType::eBufferMemoryBarrier )
10209 , pNext( nullptr )
10210 , srcAccessMask( srcAccessMask_ )
10211 , dstAccessMask( dstAccessMask_ )
10212 , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
10213 , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
10214 , buffer( buffer_ )
10215 , offset( offset_ )
10216 , size( size_ )
10217 {
10218 }
10219
10220 BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs )
10221 {
10222 memcpy( this, &rhs, sizeof(BufferMemoryBarrier) );
10223 }
10224
10225 BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs )
10226 {
10227 memcpy( this, &rhs, sizeof(BufferMemoryBarrier) );
10228 return *this;
10229 }
10230
10231 BufferMemoryBarrier& setPNext( const void* pNext_ )
10232 {
10233 pNext = pNext_;
10234 return *this;
10235 }
10236
10237 BufferMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
10238 {
10239 srcAccessMask = srcAccessMask_;
10240 return *this;
10241 }
10242
10243 BufferMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
10244 {
10245 dstAccessMask = dstAccessMask_;
10246 return *this;
10247 }
10248
10249 BufferMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
10250 {
10251 srcQueueFamilyIndex = srcQueueFamilyIndex_;
10252 return *this;
10253 }
10254
10255 BufferMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
10256 {
10257 dstQueueFamilyIndex = dstQueueFamilyIndex_;
10258 return *this;
10259 }
10260
10261 BufferMemoryBarrier& setBuffer( Buffer buffer_ )
10262 {
10263 buffer = buffer_;
10264 return *this;
10265 }
10266
10267 BufferMemoryBarrier& setOffset( DeviceSize offset_ )
10268 {
10269 offset = offset_;
10270 return *this;
10271 }
10272
10273 BufferMemoryBarrier& setSize( DeviceSize size_ )
10274 {
10275 size = size_;
10276 return *this;
10277 }
10278
10279 operator const VkBufferMemoryBarrier&() const
10280 {
10281 return *reinterpret_cast<const VkBufferMemoryBarrier*>(this);
10282 }
10283
10284 bool operator==( BufferMemoryBarrier const& rhs ) const
10285 {
10286 return ( sType == rhs.sType )
10287 && ( pNext == rhs.pNext )
10288 && ( srcAccessMask == rhs.srcAccessMask )
10289 && ( dstAccessMask == rhs.dstAccessMask )
10290 && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
10291 && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
10292 && ( buffer == rhs.buffer )
10293 && ( offset == rhs.offset )
10294 && ( size == rhs.size );
10295 }
10296
10297 bool operator!=( BufferMemoryBarrier const& rhs ) const
10298 {
10299 return !operator==( rhs );
10300 }
10301
10302 private:
10303 StructureType sType;
10304
10305 public:
10306 const void* pNext;
10307 AccessFlags srcAccessMask;
10308 AccessFlags dstAccessMask;
10309 uint32_t srcQueueFamilyIndex;
10310 uint32_t dstQueueFamilyIndex;
10311 Buffer buffer;
10312 DeviceSize offset;
10313 DeviceSize size;
10314 };
10315 static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
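
  // Illustrative sketch (not generated code): a BufferMemoryBarrier built with the setters above is
  // recorded with CommandBuffer::pipelineBarrier; the AccessFlags come from the enum defined earlier.
  // "commandBuffer" and "buffer" are assumed to exist in the calling code.
  //
  //   vk::BufferMemoryBarrier barrier;
  //   barrier.setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
  //          .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
  //          .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //          .setBuffer( buffer )
  //          .setSize( VK_WHOLE_SIZE );
  //   commandBuffer.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader,
  //                                  vk::DependencyFlags(), nullptr, barrier, nullptr );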
10316
10317 enum class BufferUsageFlagBits
10318 {
10319 eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
10320 eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
10321 eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
10322 eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
10323 eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
10324 eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
10325 eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
10326 eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
10327 eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT
10328 };
10329
10330 using BufferUsageFlags = Flags<BufferUsageFlagBits, VkBufferUsageFlags>;
10331
10332 VULKAN_HPP_INLINE BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 )
10333 {
10334 return BufferUsageFlags( bit0 ) | bit1;
10335 }
10336
10337 VULKAN_HPP_INLINE BufferUsageFlags operator~( BufferUsageFlagBits bits )
10338 {
10339 return ~( BufferUsageFlags( bits ) );
10340 }
10341
10342 template <> struct FlagTraits<BufferUsageFlagBits>
10343 {
10344 enum
10345 {
10346 allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer)
10347 };
10348 };
10349
10350 enum class BufferCreateFlagBits
10351 {
10352 eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
10353 eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
10354 eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT
10355 };
10356
10357 using BufferCreateFlags = Flags<BufferCreateFlagBits, VkBufferCreateFlags>;
10358
10359 VULKAN_HPP_INLINE BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 )
10360 {
10361 return BufferCreateFlags( bit0 ) | bit1;
10362 }
10363
10364 VULKAN_HPP_INLINE BufferCreateFlags operator~( BufferCreateFlagBits bits )
10365 {
10366 return ~( BufferCreateFlags( bits ) );
10367 }
10368
10369 template <> struct FlagTraits<BufferCreateFlagBits>
10370 {
10371 enum
10372 {
10373 allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased)
10374 };
10375 };
10376
10377 struct BufferCreateInfo
10378 {
10379 BufferCreateInfo( BufferCreateFlags flags_ = BufferCreateFlags(), DeviceSize size_ = 0, BufferUsageFlags usage_ = BufferUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr )
10380 : sType( StructureType::eBufferCreateInfo )
10381 , pNext( nullptr )
10382 , flags( flags_ )
10383 , size( size_ )
10384 , usage( usage_ )
10385 , sharingMode( sharingMode_ )
10386 , queueFamilyIndexCount( queueFamilyIndexCount_ )
10387 , pQueueFamilyIndices( pQueueFamilyIndices_ )
10388 {
10389 }
10390
10391 BufferCreateInfo( VkBufferCreateInfo const & rhs )
10392 {
10393 memcpy( this, &rhs, sizeof(BufferCreateInfo) );
10394 }
10395
10396 BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs )
10397 {
10398 memcpy( this, &rhs, sizeof(BufferCreateInfo) );
10399 return *this;
10400 }
10401
10402 BufferCreateInfo& setPNext( const void* pNext_ )
10403 {
10404 pNext = pNext_;
10405 return *this;
10406 }
10407
10408 BufferCreateInfo& setFlags( BufferCreateFlags flags_ )
10409 {
10410 flags = flags_;
10411 return *this;
10412 }
10413
10414 BufferCreateInfo& setSize( DeviceSize size_ )
10415 {
10416 size = size_;
10417 return *this;
10418 }
10419
10420 BufferCreateInfo& setUsage( BufferUsageFlags usage_ )
10421 {
10422 usage = usage_;
10423 return *this;
10424 }
10425
10426 BufferCreateInfo& setSharingMode( SharingMode sharingMode_ )
10427 {
10428 sharingMode = sharingMode_;
10429 return *this;
10430 }
10431
10432 BufferCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
10433 {
10434 queueFamilyIndexCount = queueFamilyIndexCount_;
10435 return *this;
10436 }
10437
10438 BufferCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
10439 {
10440 pQueueFamilyIndices = pQueueFamilyIndices_;
10441 return *this;
10442 }
10443
10444 operator const VkBufferCreateInfo&() const
10445 {
10446 return *reinterpret_cast<const VkBufferCreateInfo*>(this);
10447 }
10448
10449 bool operator==( BufferCreateInfo const& rhs ) const
10450 {
10451 return ( sType == rhs.sType )
10452 && ( pNext == rhs.pNext )
10453 && ( flags == rhs.flags )
10454 && ( size == rhs.size )
10455 && ( usage == rhs.usage )
10456 && ( sharingMode == rhs.sharingMode )
10457 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
10458 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
10459 }
10460
10461 bool operator!=( BufferCreateInfo const& rhs ) const
10462 {
10463 return !operator==( rhs );
10464 }
10465
10466 private:
10467 StructureType sType;
10468
10469 public:
10470 const void* pNext;
10471 BufferCreateFlags flags;
10472 DeviceSize size;
10473 BufferUsageFlags usage;
10474 SharingMode sharingMode;
10475 uint32_t queueFamilyIndexCount;
10476 const uint32_t* pQueueFamilyIndices;
10477 };
10478 static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
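
  // Illustrative sketch (not generated code): BufferCreateInfo is typically filled with the fluent setters
  // and handed to Device::createBuffer. "device" is assumed to exist in the calling code.
  //
  //   vk::BufferCreateInfo bufferInfo;
  //   bufferInfo.setSize( 65536 )
  //             .setUsage( vk::BufferUsageFlagBits::eVertexBuffer | vk::BufferUsageFlagBits::eTransferDst )
  //             .setSharingMode( vk::SharingMode::eExclusive );
  //   vk::Buffer buffer = device.createBuffer( bufferInfo );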
10479
10480 enum class ShaderStageFlagBits
10481 {
10482 eVertex = VK_SHADER_STAGE_VERTEX_BIT,
10483 eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
10484 eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
10485 eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
10486 eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
10487 eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
10488 eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
10489 eAll = VK_SHADER_STAGE_ALL
10490 };
10491
10492 using ShaderStageFlags = Flags<ShaderStageFlagBits, VkShaderStageFlags>;
10493
10494 VULKAN_HPP_INLINE ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 )
10495 {
10496 return ShaderStageFlags( bit0 ) | bit1;
10497 }
10498
10499 VULKAN_HPP_INLINE ShaderStageFlags operator~( ShaderStageFlagBits bits )
10500 {
10501 return ~( ShaderStageFlags( bits ) );
10502 }
10503
10504 template <> struct FlagTraits<ShaderStageFlagBits>
10505 {
10506 enum
10507 {
10508 allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll)
10509 };
10510 };
10511
10512 struct DescriptorSetLayoutBinding
10513 {
10514 DescriptorSetLayoutBinding( uint32_t binding_ = 0, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 0, ShaderStageFlags stageFlags_ = ShaderStageFlags(), const Sampler* pImmutableSamplers_ = nullptr )
10515 : binding( binding_ )
10516 , descriptorType( descriptorType_ )
10517 , descriptorCount( descriptorCount_ )
10518 , stageFlags( stageFlags_ )
10519 , pImmutableSamplers( pImmutableSamplers_ )
10520 {
10521 }
10522
10523 DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs )
10524 {
10525 memcpy( this, &rhs, sizeof(DescriptorSetLayoutBinding) );
10526 }
10527
10528 DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs )
10529 {
10530 memcpy( this, &rhs, sizeof(DescriptorSetLayoutBinding) );
10531 return *this;
10532 }
10533
10534 DescriptorSetLayoutBinding& setBinding( uint32_t binding_ )
10535 {
10536 binding = binding_;
10537 return *this;
10538 }
10539
10540 DescriptorSetLayoutBinding& setDescriptorType( DescriptorType descriptorType_ )
10541 {
10542 descriptorType = descriptorType_;
10543 return *this;
10544 }
10545
10546 DescriptorSetLayoutBinding& setDescriptorCount( uint32_t descriptorCount_ )
10547 {
10548 descriptorCount = descriptorCount_;
10549 return *this;
10550 }
10551
10552 DescriptorSetLayoutBinding& setStageFlags( ShaderStageFlags stageFlags_ )
10553 {
10554 stageFlags = stageFlags_;
10555 return *this;
10556 }
10557
10558 DescriptorSetLayoutBinding& setPImmutableSamplers( const Sampler* pImmutableSamplers_ )
10559 {
10560 pImmutableSamplers = pImmutableSamplers_;
10561 return *this;
10562 }
10563
10564 operator const VkDescriptorSetLayoutBinding&() const
10565 {
10566 return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>(this);
10567 }
10568
10569 bool operator==( DescriptorSetLayoutBinding const& rhs ) const
10570 {
10571 return ( binding == rhs.binding )
10572 && ( descriptorType == rhs.descriptorType )
10573 && ( descriptorCount == rhs.descriptorCount )
10574 && ( stageFlags == rhs.stageFlags )
10575 && ( pImmutableSamplers == rhs.pImmutableSamplers );
10576 }
10577
10578 bool operator!=( DescriptorSetLayoutBinding const& rhs ) const
10579 {
10580 return !operator==( rhs );
10581 }
10582
10583 uint32_t binding;
10584 DescriptorType descriptorType;
10585 uint32_t descriptorCount;
10586 ShaderStageFlags stageFlags;
10587 const Sampler* pImmutableSamplers;
10588 };
10589 static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
10590
10591 struct DescriptorSetLayoutCreateInfo
10592 {
10593 DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = DescriptorSetLayoutCreateFlags(), uint32_t bindingCount_ = 0, const DescriptorSetLayoutBinding* pBindings_ = nullptr )
10594 : sType( StructureType::eDescriptorSetLayoutCreateInfo )
10595 , pNext( nullptr )
10596 , flags( flags_ )
10597 , bindingCount( bindingCount_ )
10598 , pBindings( pBindings_ )
10599 {
10600 }
10601
10602 DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs )
10603 {
10604 memcpy( this, &rhs, sizeof(DescriptorSetLayoutCreateInfo) );
10605 }
10606
10607 DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs )
10608 {
10609 memcpy( this, &rhs, sizeof(DescriptorSetLayoutCreateInfo) );
10610 return *this;
10611 }
10612
10613 DescriptorSetLayoutCreateInfo& setPNext( const void* pNext_ )
10614 {
10615 pNext = pNext_;
10616 return *this;
10617 }
10618
10619 DescriptorSetLayoutCreateInfo& setFlags( DescriptorSetLayoutCreateFlags flags_ )
10620 {
10621 flags = flags_;
10622 return *this;
10623 }
10624
10625 DescriptorSetLayoutCreateInfo& setBindingCount( uint32_t bindingCount_ )
10626 {
10627 bindingCount = bindingCount_;
10628 return *this;
10629 }
10630
10631 DescriptorSetLayoutCreateInfo& setPBindings( const DescriptorSetLayoutBinding* pBindings_ )
10632 {
10633 pBindings = pBindings_;
10634 return *this;
10635 }
10636
10637 operator const VkDescriptorSetLayoutCreateInfo&() const
10638 {
10639 return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>(this);
10640 }
10641
10642 bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const
10643 {
10644 return ( sType == rhs.sType )
10645 && ( pNext == rhs.pNext )
10646 && ( flags == rhs.flags )
10647 && ( bindingCount == rhs.bindingCount )
10648 && ( pBindings == rhs.pBindings );
10649 }
10650
10651 bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const
10652 {
10653 return !operator==( rhs );
10654 }
10655
10656 private:
10657 StructureType sType;
10658
10659 public:
10660 const void* pNext;
10661 DescriptorSetLayoutCreateFlags flags;
10662 uint32_t bindingCount;
10663 const DescriptorSetLayoutBinding* pBindings;
10664 };
10665 static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
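
  // Illustrative sketch (not generated code): a DescriptorSetLayoutBinding describes one binding, and an
  // array of them is referenced by DescriptorSetLayoutCreateInfo. "device" is assumed to exist in the
  // calling code.
  //
  //   vk::DescriptorSetLayoutBinding uboBinding( 0, vk::DescriptorType::eUniformBuffer, 1,
  //                                              vk::ShaderStageFlagBits::eVertex );
  //   vk::DescriptorSetLayoutCreateInfo layoutInfo;
  //   layoutInfo.setBindingCount( 1 )
  //             .setPBindings( &uboBinding );
  //   vk::DescriptorSetLayout setLayout = device.createDescriptorSetLayout( layoutInfo );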
10666
10667 struct PipelineShaderStageCreateInfo
10668 {
10669 PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = PipelineShaderStageCreateFlags(), ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex, ShaderModule module_ = ShaderModule(), const char* pName_ = nullptr, const SpecializationInfo* pSpecializationInfo_ = nullptr )
10670 : sType( StructureType::ePipelineShaderStageCreateInfo )
10671 , pNext( nullptr )
10672 , flags( flags_ )
10673 , stage( stage_ )
10674 , module( module_ )
10675 , pName( pName_ )
10676 , pSpecializationInfo( pSpecializationInfo_ )
10677 {
10678 }
10679
10680 PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs )
10681 {
10682 memcpy( this, &rhs, sizeof(PipelineShaderStageCreateInfo) );
10683 }
10684
10685 PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs )
10686 {
10687 memcpy( this, &rhs, sizeof(PipelineShaderStageCreateInfo) );
10688 return *this;
10689 }
10690
10691 PipelineShaderStageCreateInfo& setPNext( const void* pNext_ )
10692 {
10693 pNext = pNext_;
10694 return *this;
10695 }
10696
10697 PipelineShaderStageCreateInfo& setFlags( PipelineShaderStageCreateFlags flags_ )
10698 {
10699 flags = flags_;
10700 return *this;
10701 }
10702
10703 PipelineShaderStageCreateInfo& setStage( ShaderStageFlagBits stage_ )
10704 {
10705 stage = stage_;
10706 return *this;
10707 }
10708
10709 PipelineShaderStageCreateInfo& setModule( ShaderModule module_ )
10710 {
10711 module = module_;
10712 return *this;
10713 }
10714
10715 PipelineShaderStageCreateInfo& setPName( const char* pName_ )
10716 {
10717 pName = pName_;
10718 return *this;
10719 }
10720
10721 PipelineShaderStageCreateInfo& setPSpecializationInfo( const SpecializationInfo* pSpecializationInfo_ )
10722 {
10723 pSpecializationInfo = pSpecializationInfo_;
10724 return *this;
10725 }
10726
10727 operator const VkPipelineShaderStageCreateInfo&() const
10728 {
10729 return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>(this);
10730 }
10731
10732 bool operator==( PipelineShaderStageCreateInfo const& rhs ) const
10733 {
10734 return ( sType == rhs.sType )
10735 && ( pNext == rhs.pNext )
10736 && ( flags == rhs.flags )
10737 && ( stage == rhs.stage )
10738 && ( module == rhs.module )
10739 && ( pName == rhs.pName )
10740 && ( pSpecializationInfo == rhs.pSpecializationInfo );
10741 }
10742
10743 bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const
10744 {
10745 return !operator==( rhs );
10746 }
10747
10748 private:
10749 StructureType sType;
10750
10751 public:
10752 const void* pNext;
10753 PipelineShaderStageCreateFlags flags;
10754 ShaderStageFlagBits stage;
10755 ShaderModule module;
10756 const char* pName;
10757 const SpecializationInfo* pSpecializationInfo;
10758 };
10759 static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
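
  // Illustrative sketch (not generated code): one PipelineShaderStageCreateInfo is filled per shader stage;
  // the entry-point string must stay valid until pipeline creation. "vertexShaderModule" is assumed to
  // exist in the calling code.
  //
  //   vk::PipelineShaderStageCreateInfo vertexStage;
  //   vertexStage.setStage( vk::ShaderStageFlagBits::eVertex )
  //              .setModule( vertexShaderModule )
  //              .setPName( "main" );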
10760
10761 struct PushConstantRange
10762 {
10763 PushConstantRange( ShaderStageFlags stageFlags_ = ShaderStageFlags(), uint32_t offset_ = 0, uint32_t size_ = 0 )
10764 : stageFlags( stageFlags_ )
10765 , offset( offset_ )
10766 , size( size_ )
10767 {
10768 }
10769
10770 PushConstantRange( VkPushConstantRange const & rhs )
10771 {
10772 memcpy( this, &rhs, sizeof(PushConstantRange) );
10773 }
10774
10775 PushConstantRange& operator=( VkPushConstantRange const & rhs )
10776 {
10777 memcpy( this, &rhs, sizeof(PushConstantRange) );
10778 return *this;
10779 }
10780
10781 PushConstantRange& setStageFlags( ShaderStageFlags stageFlags_ )
10782 {
10783 stageFlags = stageFlags_;
10784 return *this;
10785 }
10786
10787 PushConstantRange& setOffset( uint32_t offset_ )
10788 {
10789 offset = offset_;
10790 return *this;
10791 }
10792
10793 PushConstantRange& setSize( uint32_t size_ )
10794 {
10795 size = size_;
10796 return *this;
10797 }
10798
10799 operator const VkPushConstantRange&() const
10800 {
10801 return *reinterpret_cast<const VkPushConstantRange*>(this);
10802 }
10803
10804 bool operator==( PushConstantRange const& rhs ) const
10805 {
10806 return ( stageFlags == rhs.stageFlags )
10807 && ( offset == rhs.offset )
10808 && ( size == rhs.size );
10809 }
10810
10811 bool operator!=( PushConstantRange const& rhs ) const
10812 {
10813 return !operator==( rhs );
10814 }
10815
10816 ShaderStageFlags stageFlags;
10817 uint32_t offset;
10818 uint32_t size;
10819 };
10820 static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
10821
10822 struct PipelineLayoutCreateInfo
10823 {
10824 PipelineLayoutCreateInfo( PipelineLayoutCreateFlags flags_ = PipelineLayoutCreateFlags(), uint32_t setLayoutCount_ = 0, const DescriptorSetLayout* pSetLayouts_ = nullptr, uint32_t pushConstantRangeCount_ = 0, const PushConstantRange* pPushConstantRanges_ = nullptr )
10825 : sType( StructureType::ePipelineLayoutCreateInfo )
10826 , pNext( nullptr )
10827 , flags( flags_ )
10828 , setLayoutCount( setLayoutCount_ )
10829 , pSetLayouts( pSetLayouts_ )
10830 , pushConstantRangeCount( pushConstantRangeCount_ )
10831 , pPushConstantRanges( pPushConstantRanges_ )
10832 {
10833 }
10834
10835 PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs )
10836 {
10837 memcpy( this, &rhs, sizeof(PipelineLayoutCreateInfo) );
10838 }
10839
10840 PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs )
10841 {
10842 memcpy( this, &rhs, sizeof(PipelineLayoutCreateInfo) );
10843 return *this;
10844 }
10845
10846 PipelineLayoutCreateInfo& setPNext( const void* pNext_ )
10847 {
10848 pNext = pNext_;
10849 return *this;
10850 }
10851
10852 PipelineLayoutCreateInfo& setFlags( PipelineLayoutCreateFlags flags_ )
10853 {
10854 flags = flags_;
10855 return *this;
10856 }
10857
10858 PipelineLayoutCreateInfo& setSetLayoutCount( uint32_t setLayoutCount_ )
10859 {
10860 setLayoutCount = setLayoutCount_;
10861 return *this;
10862 }
10863
10864 PipelineLayoutCreateInfo& setPSetLayouts( const DescriptorSetLayout* pSetLayouts_ )
10865 {
10866 pSetLayouts = pSetLayouts_;
10867 return *this;
10868 }
10869
10870 PipelineLayoutCreateInfo& setPushConstantRangeCount( uint32_t pushConstantRangeCount_ )
10871 {
10872 pushConstantRangeCount = pushConstantRangeCount_;
10873 return *this;
10874 }
10875
10876 PipelineLayoutCreateInfo& setPPushConstantRanges( const PushConstantRange* pPushConstantRanges_ )
10877 {
10878 pPushConstantRanges = pPushConstantRanges_;
10879 return *this;
10880 }
10881
10882 operator const VkPipelineLayoutCreateInfo&() const
10883 {
10884 return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>(this);
10885 }
10886
10887 bool operator==( PipelineLayoutCreateInfo const& rhs ) const
10888 {
10889 return ( sType == rhs.sType )
10890 && ( pNext == rhs.pNext )
10891 && ( flags == rhs.flags )
10892 && ( setLayoutCount == rhs.setLayoutCount )
10893 && ( pSetLayouts == rhs.pSetLayouts )
10894 && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
10895 && ( pPushConstantRanges == rhs.pPushConstantRanges );
10896 }
10897
10898 bool operator!=( PipelineLayoutCreateInfo const& rhs ) const
10899 {
10900 return !operator==( rhs );
10901 }
10902
10903 private:
10904 StructureType sType;
10905
10906 public:
10907 const void* pNext;
10908 PipelineLayoutCreateFlags flags;
10909 uint32_t setLayoutCount;
10910 const DescriptorSetLayout* pSetLayouts;
10911 uint32_t pushConstantRangeCount;
10912 const PushConstantRange* pPushConstantRanges;
10913 };
10914 static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
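// Illustrative usage sketch (not part of the generated interface): a pipeline layout with one
// descriptor set layout and one push-constant range. Assumes a vk::Device `device` and a
// vk::DescriptorSetLayout `descriptorSetLayout` created elsewhere; createPipelineLayout is the
// enhanced-mode wrapper declared elsewhere in this header.
//
//   vk::PushConstantRange range( vk::ShaderStageFlagBits::eVertex, 0, 64 );
//   vk::PipelineLayoutCreateInfo layoutInfo( vk::PipelineLayoutCreateFlags(), 1, &descriptorSetLayout, 1, &range );
//   vk::PipelineLayout pipelineLayout = device.createPipelineLayout( layoutInfo );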
10915
10916 enum class ImageUsageFlagBits
10917 {
10918 eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
10919 eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
10920 eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
10921 eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
10922 eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
10923 eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
10924 eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
10925 eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT
10926 };
10927
10928 using ImageUsageFlags = Flags<ImageUsageFlagBits, VkImageUsageFlags>;
10929
10930 VULKAN_HPP_INLINE ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 )
10931 {
10932 return ImageUsageFlags( bit0 ) | bit1;
10933 }
10934
10935 VULKAN_HPP_INLINE ImageUsageFlags operator~( ImageUsageFlagBits bits )
10936 {
10937 return ~( ImageUsageFlags( bits ) );
10938 }
10939
10940 template <> struct FlagTraits<ImageUsageFlagBits>
10941 {
10942 enum
10943 {
10944 allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment)
10945 };
10946 };
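// Illustrative usage sketch (not part of the generated interface): individual ImageUsageFlagBits
// values combine into an ImageUsageFlags mask through the operator| defined above.
//
//   vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eColorAttachment
//                             | vk::ImageUsageFlagBits::eSampled;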
10947
10948 enum class ImageCreateFlagBits
10949 {
10950 eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
10951 eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
10952 eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
10953 eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
10954 eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
10955 e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR
10956 };
10957
10958 using ImageCreateFlags = Flags<ImageCreateFlagBits, VkImageCreateFlags>;
10959
10960 VULKAN_HPP_INLINE ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 )
10961 {
10962 return ImageCreateFlags( bit0 ) | bit1;
10963 }
10964
10965 VULKAN_HPP_INLINE ImageCreateFlags operator~( ImageCreateFlagBits bits )
10966 {
10967 return ~( ImageCreateFlags( bits ) );
10968 }
10969
10970 template <> struct FlagTraits<ImageCreateFlagBits>
10971 {
10972 enum
10973 {
10974 allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) | VkFlags(ImageCreateFlagBits::eSparseResidency) | VkFlags(ImageCreateFlagBits::eSparseAliased) | VkFlags(ImageCreateFlagBits::eMutableFormat) | VkFlags(ImageCreateFlagBits::eCubeCompatible) | VkFlags(ImageCreateFlagBits::e2DArrayCompatibleKHR)
10975 };
10976 };
10977
10978 struct PhysicalDeviceImageFormatInfo2KHR
10979 {
10980 PhysicalDeviceImageFormatInfo2KHR( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), ImageCreateFlags flags_ = ImageCreateFlags() )
10981 : sType( StructureType::ePhysicalDeviceImageFormatInfo2KHR )
10982 , pNext( nullptr )
10983 , format( format_ )
10984 , type( type_ )
10985 , tiling( tiling_ )
10986 , usage( usage_ )
10987 , flags( flags_ )
10988 {
10989 }
10990
10991 PhysicalDeviceImageFormatInfo2KHR( VkPhysicalDeviceImageFormatInfo2KHR const & rhs )
10992 {
10993 memcpy( this, &rhs, sizeof(PhysicalDeviceImageFormatInfo2KHR) );
10994 }
10995
10996 PhysicalDeviceImageFormatInfo2KHR& operator=( VkPhysicalDeviceImageFormatInfo2KHR const & rhs )
10997 {
10998 memcpy( this, &rhs, sizeof(PhysicalDeviceImageFormatInfo2KHR) );
10999 return *this;
11000 }
11001
11002 PhysicalDeviceImageFormatInfo2KHR& setPNext( const void* pNext_ )
11003 {
11004 pNext = pNext_;
11005 return *this;
11006 }
11007
11008 PhysicalDeviceImageFormatInfo2KHR& setFormat( Format format_ )
11009 {
11010 format = format_;
11011 return *this;
11012 }
11013
11014 PhysicalDeviceImageFormatInfo2KHR& setType( ImageType type_ )
11015 {
11016 type = type_;
11017 return *this;
11018 }
11019
11020 PhysicalDeviceImageFormatInfo2KHR& setTiling( ImageTiling tiling_ )
11021 {
11022 tiling = tiling_;
11023 return *this;
11024 }
11025
11026 PhysicalDeviceImageFormatInfo2KHR& setUsage( ImageUsageFlags usage_ )
11027 {
11028 usage = usage_;
11029 return *this;
11030 }
11031
11032 PhysicalDeviceImageFormatInfo2KHR& setFlags( ImageCreateFlags flags_ )
11033 {
11034 flags = flags_;
11035 return *this;
11036 }
11037
11038 operator const VkPhysicalDeviceImageFormatInfo2KHR&() const
11039 {
11040 return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>(this);
11041 }
11042
11043 bool operator==( PhysicalDeviceImageFormatInfo2KHR const& rhs ) const
11044 {
11045 return ( sType == rhs.sType )
11046 && ( pNext == rhs.pNext )
11047 && ( format == rhs.format )
11048 && ( type == rhs.type )
11049 && ( tiling == rhs.tiling )
11050 && ( usage == rhs.usage )
11051 && ( flags == rhs.flags );
11052 }
11053
11054 bool operator!=( PhysicalDeviceImageFormatInfo2KHR const& rhs ) const
11055 {
11056 return !operator==( rhs );
11057 }
11058
11059 private:
11060 StructureType sType;
11061
11062 public:
11063 const void* pNext;
11064 Format format;
11065 ImageType type;
11066 ImageTiling tiling;
11067 ImageUsageFlags usage;
11068 ImageCreateFlags flags;
11069 };
11070 static_assert( sizeof( PhysicalDeviceImageFormatInfo2KHR ) == sizeof( VkPhysicalDeviceImageFormatInfo2KHR ), "struct and wrapper have different size!" );
11071
11072 enum class PipelineCreateFlagBits
11073 {
11074 eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
11075 eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
11076 eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT
11077 };
11078
11079 using PipelineCreateFlags = Flags<PipelineCreateFlagBits, VkPipelineCreateFlags>;
11080
11081 VULKAN_HPP_INLINE PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 )
11082 {
11083 return PipelineCreateFlags( bit0 ) | bit1;
11084 }
11085
11086 VULKAN_HPP_INLINE PipelineCreateFlags operator~( PipelineCreateFlagBits bits )
11087 {
11088 return ~( PipelineCreateFlags( bits ) );
11089 }
11090
11091 template <> struct FlagTraits<PipelineCreateFlagBits>
11092 {
11093 enum
11094 {
11095 allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative)
11096 };
11097 };
11098
11099 struct ComputePipelineCreateInfo
11100 {
11101 ComputePipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), PipelineShaderStageCreateInfo stage_ = PipelineShaderStageCreateInfo(), PipelineLayout layout_ = PipelineLayout(), Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
11102 : sType( StructureType::eComputePipelineCreateInfo )
11103 , pNext( nullptr )
11104 , flags( flags_ )
11105 , stage( stage_ )
11106 , layout( layout_ )
11107 , basePipelineHandle( basePipelineHandle_ )
11108 , basePipelineIndex( basePipelineIndex_ )
11109 {
11110 }
11111
11112 ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs )
11113 {
11114 memcpy( this, &rhs, sizeof(ComputePipelineCreateInfo) );
11115 }
11116
11117 ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs )
11118 {
11119 memcpy( this, &rhs, sizeof(ComputePipelineCreateInfo) );
11120 return *this;
11121 }
11122
11123 ComputePipelineCreateInfo& setPNext( const void* pNext_ )
11124 {
11125 pNext = pNext_;
11126 return *this;
11127 }
11128
11129 ComputePipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
11130 {
11131 flags = flags_;
11132 return *this;
11133 }
11134
11135 ComputePipelineCreateInfo& setStage( PipelineShaderStageCreateInfo stage_ )
11136 {
11137 stage = stage_;
11138 return *this;
11139 }
11140
11141 ComputePipelineCreateInfo& setLayout( PipelineLayout layout_ )
11142 {
11143 layout = layout_;
11144 return *this;
11145 }
11146
11147 ComputePipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
11148 {
11149 basePipelineHandle = basePipelineHandle_;
11150 return *this;
11151 }
11152
11153 ComputePipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
11154 {
11155 basePipelineIndex = basePipelineIndex_;
11156 return *this;
11157 }
11158
11159 operator const VkComputePipelineCreateInfo&() const
11160 {
11161 return *reinterpret_cast<const VkComputePipelineCreateInfo*>(this);
11162 }
11163
11164 bool operator==( ComputePipelineCreateInfo const& rhs ) const
11165 {
11166 return ( sType == rhs.sType )
11167 && ( pNext == rhs.pNext )
11168 && ( flags == rhs.flags )
11169 && ( stage == rhs.stage )
11170 && ( layout == rhs.layout )
11171 && ( basePipelineHandle == rhs.basePipelineHandle )
11172 && ( basePipelineIndex == rhs.basePipelineIndex );
11173 }
11174
11175 bool operator!=( ComputePipelineCreateInfo const& rhs ) const
11176 {
11177 return !operator==( rhs );
11178 }
11179
11180 private:
11181 StructureType sType;
11182
11183 public:
11184 const void* pNext;
11185 PipelineCreateFlags flags;
11186 PipelineShaderStageCreateInfo stage;
11187 PipelineLayout layout;
11188 Pipeline basePipelineHandle;
11189 int32_t basePipelineIndex;
11190 };
11191 static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
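// Illustrative usage sketch (not part of the generated interface): a minimal compute pipeline
// description. Assumes a vk::PipelineShaderStageCreateInfo `computeStage`, a vk::PipelineLayout
// `pipelineLayout`, a vk::PipelineCache `pipelineCache` and a vk::Device `device` created
// elsewhere; createComputePipeline is the enhanced-mode wrapper declared elsewhere in this header.
//
//   vk::ComputePipelineCreateInfo pipelineInfo = vk::ComputePipelineCreateInfo()
//     .setStage( computeStage )
//     .setLayout( pipelineLayout );
//   vk::Pipeline pipeline = device.createComputePipeline( pipelineCache, pipelineInfo );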
11192
11193 enum class ColorComponentFlagBits
11194 {
11195 eR = VK_COLOR_COMPONENT_R_BIT,
11196 eG = VK_COLOR_COMPONENT_G_BIT,
11197 eB = VK_COLOR_COMPONENT_B_BIT,
11198 eA = VK_COLOR_COMPONENT_A_BIT
11199 };
11200
11201 using ColorComponentFlags = Flags<ColorComponentFlagBits, VkColorComponentFlags>;
11202
11203 VULKAN_HPP_INLINE ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 )
11204 {
11205 return ColorComponentFlags( bit0 ) | bit1;
11206 }
11207
11208 VULKAN_HPP_INLINE ColorComponentFlags operator~( ColorComponentFlagBits bits )
11209 {
11210 return ~( ColorComponentFlags( bits ) );
11211 }
11212
11213 template <> struct FlagTraits<ColorComponentFlagBits>
11214 {
11215 enum
11216 {
11217 allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA)
11218 };
11219 };
11220
11221 struct PipelineColorBlendAttachmentState
11222 {
11223 PipelineColorBlendAttachmentState( Bool32 blendEnable_ = 0, BlendFactor srcColorBlendFactor_ = BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd, ColorComponentFlags colorWriteMask_ = ColorComponentFlags() )
11224 : blendEnable( blendEnable_ )
11225 , srcColorBlendFactor( srcColorBlendFactor_ )
11226 , dstColorBlendFactor( dstColorBlendFactor_ )
11227 , colorBlendOp( colorBlendOp_ )
11228 , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
11229 , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
11230 , alphaBlendOp( alphaBlendOp_ )
11231 , colorWriteMask( colorWriteMask_ )
11232 {
11233 }
11234
11235 PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs )
11236 {
11237 memcpy( this, &rhs, sizeof(PipelineColorBlendAttachmentState) );
11238 }
11239
11240 PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs )
11241 {
11242 memcpy( this, &rhs, sizeof(PipelineColorBlendAttachmentState) );
11243 return *this;
11244 }
11245
11246 PipelineColorBlendAttachmentState& setBlendEnable( Bool32 blendEnable_ )
11247 {
11248 blendEnable = blendEnable_;
11249 return *this;
11250 }
11251
11252 PipelineColorBlendAttachmentState& setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ )
11253 {
11254 srcColorBlendFactor = srcColorBlendFactor_;
11255 return *this;
11256 }
11257
11258 PipelineColorBlendAttachmentState& setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ )
11259 {
11260 dstColorBlendFactor = dstColorBlendFactor_;
11261 return *this;
11262 }
11263
11264 PipelineColorBlendAttachmentState& setColorBlendOp( BlendOp colorBlendOp_ )
11265 {
11266 colorBlendOp = colorBlendOp_;
11267 return *this;
11268 }
11269
11270 PipelineColorBlendAttachmentState& setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ )
11271 {
11272 srcAlphaBlendFactor = srcAlphaBlendFactor_;
11273 return *this;
11274 }
11275
11276 PipelineColorBlendAttachmentState& setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ )
11277 {
11278 dstAlphaBlendFactor = dstAlphaBlendFactor_;
11279 return *this;
11280 }
11281
11282 PipelineColorBlendAttachmentState& setAlphaBlendOp( BlendOp alphaBlendOp_ )
11283 {
11284 alphaBlendOp = alphaBlendOp_;
11285 return *this;
11286 }
11287
11288 PipelineColorBlendAttachmentState& setColorWriteMask( ColorComponentFlags colorWriteMask_ )
11289 {
11290 colorWriteMask = colorWriteMask_;
11291 return *this;
11292 }
11293
11294 operator const VkPipelineColorBlendAttachmentState&() const
11295 {
11296 return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>(this);
11297 }
11298
11299 bool operator==( PipelineColorBlendAttachmentState const& rhs ) const
11300 {
11301 return ( blendEnable == rhs.blendEnable )
11302 && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
11303 && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
11304 && ( colorBlendOp == rhs.colorBlendOp )
11305 && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
11306 && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
11307 && ( alphaBlendOp == rhs.alphaBlendOp )
11308 && ( colorWriteMask == rhs.colorWriteMask );
11309 }
11310
11311 bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const
11312 {
11313 return !operator==( rhs );
11314 }
11315
11316 Bool32 blendEnable;
11317 BlendFactor srcColorBlendFactor;
11318 BlendFactor dstColorBlendFactor;
11319 BlendOp colorBlendOp;
11320 BlendFactor srcAlphaBlendFactor;
11321 BlendFactor dstAlphaBlendFactor;
11322 BlendOp alphaBlendOp;
11323 ColorComponentFlags colorWriteMask;
11324 };
11325 static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
11326
11327 struct PipelineColorBlendStateCreateInfo
11328 {
11329 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_ = PipelineColorBlendStateCreateFlags(), Bool32 logicOpEnable_ = 0, LogicOp logicOp_ = LogicOp::eClear, uint32_t attachmentCount_ = 0, const PipelineColorBlendAttachmentState* pAttachments_ = nullptr, std::array<float,4> const& blendConstants_ = { { 0, 0, 0, 0 } } )
11330 : sType( StructureType::ePipelineColorBlendStateCreateInfo )
11331 , pNext( nullptr )
11332 , flags( flags_ )
11333 , logicOpEnable( logicOpEnable_ )
11334 , logicOp( logicOp_ )
11335 , attachmentCount( attachmentCount_ )
11336 , pAttachments( pAttachments_ )
11337 {
11338 memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
11339 }
11340
11341 PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs )
11342 {
11343 memcpy( this, &rhs, sizeof(PipelineColorBlendStateCreateInfo) );
11344 }
11345
11346 PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs )
11347 {
11348 memcpy( this, &rhs, sizeof(PipelineColorBlendStateCreateInfo) );
11349 return *this;
11350 }
11351
11352 PipelineColorBlendStateCreateInfo& setPNext( const void* pNext_ )
11353 {
11354 pNext = pNext_;
11355 return *this;
11356 }
11357
11358 PipelineColorBlendStateCreateInfo& setFlags( PipelineColorBlendStateCreateFlags flags_ )
11359 {
11360 flags = flags_;
11361 return *this;
11362 }
11363
11364 PipelineColorBlendStateCreateInfo& setLogicOpEnable( Bool32 logicOpEnable_ )
11365 {
11366 logicOpEnable = logicOpEnable_;
11367 return *this;
11368 }
11369
11370 PipelineColorBlendStateCreateInfo& setLogicOp( LogicOp logicOp_ )
11371 {
11372 logicOp = logicOp_;
11373 return *this;
11374 }
11375
11376 PipelineColorBlendStateCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
11377 {
11378 attachmentCount = attachmentCount_;
11379 return *this;
11380 }
11381
11382 PipelineColorBlendStateCreateInfo& setPAttachments( const PipelineColorBlendAttachmentState* pAttachments_ )
11383 {
11384 pAttachments = pAttachments_;
11385 return *this;
11386 }
11387
11388 PipelineColorBlendStateCreateInfo& setBlendConstants( std::array<float,4> blendConstants_ )
11389 {
11390 memcpy( &blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
11391 return *this;
11392 }
11393
11394 operator const VkPipelineColorBlendStateCreateInfo&() const
11395 {
11396 return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>(this);
11397 }
11398
11399 bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const
11400 {
11401 return ( sType == rhs.sType )
11402 && ( pNext == rhs.pNext )
11403 && ( flags == rhs.flags )
11404 && ( logicOpEnable == rhs.logicOpEnable )
11405 && ( logicOp == rhs.logicOp )
11406 && ( attachmentCount == rhs.attachmentCount )
11407 && ( pAttachments == rhs.pAttachments )
11408 && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
11409 }
11410
11411 bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const
11412 {
11413 return !operator==( rhs );
11414 }
11415
11416 private:
11417 StructureType sType;
11418
11419 public:
11420 const void* pNext;
11421 PipelineColorBlendStateCreateFlags flags;
11422 Bool32 logicOpEnable;
11423 LogicOp logicOp;
11424 uint32_t attachmentCount;
11425 const PipelineColorBlendAttachmentState* pAttachments;
11426 float blendConstants[4];
11427 };
11428 static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
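// Illustrative usage sketch (not part of the generated interface): one blend attachment with
// blending disabled and all color channels written, referenced by the color blend state.
//
//   vk::PipelineColorBlendAttachmentState attachment = vk::PipelineColorBlendAttachmentState()
//     .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
//                       | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );
//   vk::PipelineColorBlendStateCreateInfo blendState = vk::PipelineColorBlendStateCreateInfo()
//     .setAttachmentCount( 1 )
//     .setPAttachments( &attachment );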
11429
11430 enum class FenceCreateFlagBits
11431 {
11432 eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
11433 };
11434
11435 using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
11436
11437 VULKAN_HPP_INLINE FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 )
11438 {
11439 return FenceCreateFlags( bit0 ) | bit1;
11440 }
11441
11442 VULKAN_HPP_INLINE FenceCreateFlags operator~( FenceCreateFlagBits bits )
11443 {
11444 return ~( FenceCreateFlags( bits ) );
11445 }
11446
11447 template <> struct FlagTraits<FenceCreateFlagBits>
11448 {
11449 enum
11450 {
11451 allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
11452 };
11453 };
11454
11455 struct FenceCreateInfo
11456 {
11457 FenceCreateInfo( FenceCreateFlags flags_ = FenceCreateFlags() )
11458 : sType( StructureType::eFenceCreateInfo )
11459 , pNext( nullptr )
11460 , flags( flags_ )
11461 {
11462 }
11463
11464 FenceCreateInfo( VkFenceCreateInfo const & rhs )
11465 {
11466 memcpy( this, &rhs, sizeof(FenceCreateInfo) );
11467 }
11468
11469 FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs )
11470 {
11471 memcpy( this, &rhs, sizeof(FenceCreateInfo) );
11472 return *this;
11473 }
11474
11475 FenceCreateInfo& setPNext( const void* pNext_ )
11476 {
11477 pNext = pNext_;
11478 return *this;
11479 }
11480
11481 FenceCreateInfo& setFlags( FenceCreateFlags flags_ )
11482 {
11483 flags = flags_;
11484 return *this;
11485 }
11486
11487 operator const VkFenceCreateInfo&() const
11488 {
11489 return *reinterpret_cast<const VkFenceCreateInfo*>(this);
11490 }
11491
11492 bool operator==( FenceCreateInfo const& rhs ) const
11493 {
11494 return ( sType == rhs.sType )
11495 && ( pNext == rhs.pNext )
11496 && ( flags == rhs.flags );
11497 }
11498
11499 bool operator!=( FenceCreateInfo const& rhs ) const
11500 {
11501 return !operator==( rhs );
11502 }
11503
11504 private:
11505 StructureType sType;
11506
11507 public:
11508 const void* pNext;
11509 FenceCreateFlags flags;
11510 };
11511 static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
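// Illustrative usage sketch (not part of the generated interface): a fence created in the
// signaled state, so the first wait on it returns immediately. Assumes a vk::Device `device`
// created elsewhere; createFence is the enhanced-mode wrapper declared elsewhere in this header.
//
//   vk::Fence fence = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );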
11512
11513 enum class FormatFeatureFlagBits
11514 {
11515 eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
11516 eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
11517 eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
11518 eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
11519 eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
11520 eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
11521 eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
11522 eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
11523 eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
11524 eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
11525 eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
11526 eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
11527 eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
11528 eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
11529 eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR,
11530 eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR
11531 };
11532
11533 using FormatFeatureFlags = Flags<FormatFeatureFlagBits, VkFormatFeatureFlags>;
11534
11535 VULKAN_HPP_INLINE FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 )
11536 {
11537 return FormatFeatureFlags( bit0 ) | bit1;
11538 }
11539
11540 VULKAN_HPP_INLINE FormatFeatureFlags operator~( FormatFeatureFlagBits bits )
11541 {
11542 return ~( FormatFeatureFlags( bits ) );
11543 }
11544
11545 template <> struct FlagTraits<FormatFeatureFlagBits>
11546 {
11547 enum
11548 {
11549 allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) | VkFlags(FormatFeatureFlagBits::eStorageImage) | VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) | VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) | VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) | VkFlags(FormatFeatureFlagBits::eVertexBuffer) | VkFlags(FormatFeatureFlagBits::eColorAttachment) | VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) | VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) | VkFlags(FormatFeatureFlagBits::eBlitSrc) | VkFlags(FormatFeatureFlagBits::eBlitDst) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) | VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) | VkFlags(FormatFeatureFlagBits::eTransferSrcKHR) | VkFlags(FormatFeatureFlagBits::eTransferDstKHR)
11550 };
11551 };
11552
11553 struct FormatProperties
11554 {
11555 operator const VkFormatProperties&() const
11556 {
11557 return *reinterpret_cast<const VkFormatProperties*>(this);
11558 }
11559
11560 bool operator==( FormatProperties const& rhs ) const
11561 {
11562 return ( linearTilingFeatures == rhs.linearTilingFeatures )
11563 && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
11564 && ( bufferFeatures == rhs.bufferFeatures );
11565 }
11566
11567 bool operator!=( FormatProperties const& rhs ) const
11568 {
11569 return !operator==( rhs );
11570 }
11571
11572 FormatFeatureFlags linearTilingFeatures;
11573 FormatFeatureFlags optimalTilingFeatures;
11574 FormatFeatureFlags bufferFeatures;
11575 };
11576 static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
11577
11578 struct FormatProperties2KHR
11579 {
11580 operator const VkFormatProperties2KHR&() const
11581 {
11582 return *reinterpret_cast<const VkFormatProperties2KHR*>(this);
11583 }
11584
11585 bool operator==( FormatProperties2KHR const& rhs ) const
11586 {
11587 return ( sType == rhs.sType )
11588 && ( pNext == rhs.pNext )
11589 && ( formatProperties == rhs.formatProperties );
11590 }
11591
11592 bool operator!=( FormatProperties2KHR const& rhs ) const
11593 {
11594 return !operator==( rhs );
11595 }
11596
11597 private:
11598 StructureType sType;
11599
11600 public:
11601 void* pNext;
11602 FormatProperties formatProperties;
11603 };
11604 static_assert( sizeof( FormatProperties2KHR ) == sizeof( VkFormatProperties2KHR ), "struct and wrapper have different size!" );
11605
11606 enum class QueryControlFlagBits
11607 {
11608 ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
11609 };
11610
11611 using QueryControlFlags = Flags<QueryControlFlagBits, VkQueryControlFlags>;
11612
11613 VULKAN_HPP_INLINE QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 )
11614 {
11615 return QueryControlFlags( bit0 ) | bit1;
11616 }
11617
11618 VULKAN_HPP_INLINE QueryControlFlags operator~( QueryControlFlagBits bits )
11619 {
11620 return ~( QueryControlFlags( bits ) );
11621 }
11622
11623 template <> struct FlagTraits<QueryControlFlagBits>
11624 {
11625 enum
11626 {
11627 allFlags = VkFlags(QueryControlFlagBits::ePrecise)
11628 };
11629 };
11630
11631 enum class QueryResultFlagBits
11632 {
11633 e64 = VK_QUERY_RESULT_64_BIT,
11634 eWait = VK_QUERY_RESULT_WAIT_BIT,
11635 eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
11636 ePartial = VK_QUERY_RESULT_PARTIAL_BIT
11637 };
11638
11639 using QueryResultFlags = Flags<QueryResultFlagBits, VkQueryResultFlags>;
11640
11641 VULKAN_HPP_INLINE QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 )
11642 {
11643 return QueryResultFlags( bit0 ) | bit1;
11644 }
11645
11646 VULKAN_HPP_INLINE QueryResultFlags operator~( QueryResultFlagBits bits )
11647 {
11648 return ~( QueryResultFlags( bits ) );
11649 }
11650
11651 template <> struct FlagTraits<QueryResultFlagBits>
11652 {
11653 enum
11654 {
11655 allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial)
11656 };
11657 };
11658
11659 enum class CommandBufferUsageFlagBits
11660 {
11661 eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
11662 eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
11663 eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
11664 };
11665
11666 using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
11667
11668 VULKAN_HPP_INLINE CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 )
11669 {
11670 return CommandBufferUsageFlags( bit0 ) | bit1;
11671 }
11672
11673 VULKAN_HPP_INLINE CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits )
11674 {
11675 return ~( CommandBufferUsageFlags( bits ) );
11676 }
11677
11678 template <> struct FlagTraits<CommandBufferUsageFlagBits>
11679 {
11680 enum
11681 {
11682 allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
11683 };
11684 };
11685
11686 enum class QueryPipelineStatisticFlagBits
11687 {
11688 eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
11689 eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
11690 eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
11691 eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
11692 eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
11693 eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
11694 eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
11695 eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
11696 eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
11697 eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
11698 eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
11699 };
11700
11701 using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits, VkQueryPipelineStatisticFlags>;
11702
11703 VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 )
11704 {
11705 return QueryPipelineStatisticFlags( bit0 ) | bit1;
11706 }
11707
11708 VULKAN_HPP_INLINE QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits )
11709 {
11710 return ~( QueryPipelineStatisticFlags( bits ) );
11711 }
11712
11713 template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
11714 {
11715 enum
11716 {
11717 allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
11718 };
11719 };
11720
11721 struct CommandBufferInheritanceInfo
11722 {
11723 CommandBufferInheritanceInfo( RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Framebuffer framebuffer_ = Framebuffer(), Bool32 occlusionQueryEnable_ = 0, QueryControlFlags queryFlags_ = QueryControlFlags(), QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
11724 : sType( StructureType::eCommandBufferInheritanceInfo )
11725 , pNext( nullptr )
11726 , renderPass( renderPass_ )
11727 , subpass( subpass_ )
11728 , framebuffer( framebuffer_ )
11729 , occlusionQueryEnable( occlusionQueryEnable_ )
11730 , queryFlags( queryFlags_ )
11731 , pipelineStatistics( pipelineStatistics_ )
11732 {
11733 }
11734
11735 CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs )
11736 {
11737 memcpy( this, &rhs, sizeof(CommandBufferInheritanceInfo) );
11738 }
11739
11740 CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs )
11741 {
11742 memcpy( this, &rhs, sizeof(CommandBufferInheritanceInfo) );
11743 return *this;
11744 }
11745
11746 CommandBufferInheritanceInfo& setPNext( const void* pNext_ )
11747 {
11748 pNext = pNext_;
11749 return *this;
11750 }
11751
11752 CommandBufferInheritanceInfo& setRenderPass( RenderPass renderPass_ )
11753 {
11754 renderPass = renderPass_;
11755 return *this;
11756 }
11757
11758 CommandBufferInheritanceInfo& setSubpass( uint32_t subpass_ )
11759 {
11760 subpass = subpass_;
11761 return *this;
11762 }
11763
11764 CommandBufferInheritanceInfo& setFramebuffer( Framebuffer framebuffer_ )
11765 {
11766 framebuffer = framebuffer_;
11767 return *this;
11768 }
11769
11770 CommandBufferInheritanceInfo& setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ )
11771 {
11772 occlusionQueryEnable = occlusionQueryEnable_;
11773 return *this;
11774 }
11775
11776 CommandBufferInheritanceInfo& setQueryFlags( QueryControlFlags queryFlags_ )
11777 {
11778 queryFlags = queryFlags_;
11779 return *this;
11780 }
11781
11782 CommandBufferInheritanceInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
11783 {
11784 pipelineStatistics = pipelineStatistics_;
11785 return *this;
11786 }
11787
11788 operator const VkCommandBufferInheritanceInfo&() const
11789 {
11790 return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>(this);
11791 }
11792
11793 bool operator==( CommandBufferInheritanceInfo const& rhs ) const
11794 {
11795 return ( sType == rhs.sType )
11796 && ( pNext == rhs.pNext )
11797 && ( renderPass == rhs.renderPass )
11798 && ( subpass == rhs.subpass )
11799 && ( framebuffer == rhs.framebuffer )
11800 && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
11801 && ( queryFlags == rhs.queryFlags )
11802 && ( pipelineStatistics == rhs.pipelineStatistics );
11803 }
11804
11805 bool operator!=( CommandBufferInheritanceInfo const& rhs ) const
11806 {
11807 return !operator==( rhs );
11808 }
11809
11810 private:
11811 StructureType sType;
11812
11813 public:
11814 const void* pNext;
11815 RenderPass renderPass;
11816 uint32_t subpass;
11817 Framebuffer framebuffer;
11818 Bool32 occlusionQueryEnable;
11819 QueryControlFlags queryFlags;
11820 QueryPipelineStatisticFlags pipelineStatistics;
11821 };
11822 static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
11823
11824 struct CommandBufferBeginInfo
11825 {
11826 CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = CommandBufferUsageFlags(), const CommandBufferInheritanceInfo* pInheritanceInfo_ = nullptr )
11827 : sType( StructureType::eCommandBufferBeginInfo )
11828 , pNext( nullptr )
11829 , flags( flags_ )
11830 , pInheritanceInfo( pInheritanceInfo_ )
11831 {
11832 }
11833
11834 CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs )
11835 {
11836 memcpy( this, &rhs, sizeof(CommandBufferBeginInfo) );
11837 }
11838
11839 CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs )
11840 {
11841 memcpy( this, &rhs, sizeof(CommandBufferBeginInfo) );
11842 return *this;
11843 }
11844
11845 CommandBufferBeginInfo& setPNext( const void* pNext_ )
11846 {
11847 pNext = pNext_;
11848 return *this;
11849 }
11850
11851 CommandBufferBeginInfo& setFlags( CommandBufferUsageFlags flags_ )
11852 {
11853 flags = flags_;
11854 return *this;
11855 }
11856
11857 CommandBufferBeginInfo& setPInheritanceInfo( const CommandBufferInheritanceInfo* pInheritanceInfo_ )
11858 {
11859 pInheritanceInfo = pInheritanceInfo_;
11860 return *this;
11861 }
11862
11863 operator const VkCommandBufferBeginInfo&() const
11864 {
11865 return *reinterpret_cast<const VkCommandBufferBeginInfo*>(this);
11866 }
11867
11868 bool operator==( CommandBufferBeginInfo const& rhs ) const
11869 {
11870 return ( sType == rhs.sType )
11871 && ( pNext == rhs.pNext )
11872 && ( flags == rhs.flags )
11873 && ( pInheritanceInfo == rhs.pInheritanceInfo );
11874 }
11875
11876 bool operator!=( CommandBufferBeginInfo const& rhs ) const
11877 {
11878 return !operator==( rhs );
11879 }
11880
11881 private:
11882 StructureType sType;
11883
11884 public:
11885 const void* pNext;
11886 CommandBufferUsageFlags flags;
11887 const CommandBufferInheritanceInfo* pInheritanceInfo;
11888 };
11889 static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
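// Illustrative usage sketch (not part of the generated interface): recording a command buffer
// that will be submitted once. Assumes a vk::CommandBuffer `commandBuffer` allocated elsewhere;
// begin() is the enhanced-mode wrapper declared elsewhere in this header.
//
//   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
//   commandBuffer.begin( beginInfo );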
11890
11891 struct QueryPoolCreateInfo
11892 {
11893 QueryPoolCreateInfo( QueryPoolCreateFlags flags_ = QueryPoolCreateFlags(), QueryType queryType_ = QueryType::eOcclusion, uint32_t queryCount_ = 0, QueryPipelineStatisticFlags pipelineStatistics_ = QueryPipelineStatisticFlags() )
11894 : sType( StructureType::eQueryPoolCreateInfo )
11895 , pNext( nullptr )
11896 , flags( flags_ )
11897 , queryType( queryType_ )
11898 , queryCount( queryCount_ )
11899 , pipelineStatistics( pipelineStatistics_ )
11900 {
11901 }
11902
11903 QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs )
11904 {
11905 memcpy( this, &rhs, sizeof(QueryPoolCreateInfo) );
11906 }
11907
11908 QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs )
11909 {
11910 memcpy( this, &rhs, sizeof(QueryPoolCreateInfo) );
11911 return *this;
11912 }
11913
11914 QueryPoolCreateInfo& setPNext( const void* pNext_ )
11915 {
11916 pNext = pNext_;
11917 return *this;
11918 }
11919
11920 QueryPoolCreateInfo& setFlags( QueryPoolCreateFlags flags_ )
11921 {
11922 flags = flags_;
11923 return *this;
11924 }
11925
11926 QueryPoolCreateInfo& setQueryType( QueryType queryType_ )
11927 {
11928 queryType = queryType_;
11929 return *this;
11930 }
11931
11932 QueryPoolCreateInfo& setQueryCount( uint32_t queryCount_ )
11933 {
11934 queryCount = queryCount_;
11935 return *this;
11936 }
11937
11938 QueryPoolCreateInfo& setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ )
11939 {
11940 pipelineStatistics = pipelineStatistics_;
11941 return *this;
11942 }
11943
11944 operator const VkQueryPoolCreateInfo&() const
11945 {
11946 return *reinterpret_cast<const VkQueryPoolCreateInfo*>(this);
11947 }
11948
11949 bool operator==( QueryPoolCreateInfo const& rhs ) const
11950 {
11951 return ( sType == rhs.sType )
11952 && ( pNext == rhs.pNext )
11953 && ( flags == rhs.flags )
11954 && ( queryType == rhs.queryType )
11955 && ( queryCount == rhs.queryCount )
11956 && ( pipelineStatistics == rhs.pipelineStatistics );
11957 }
11958
11959 bool operator!=( QueryPoolCreateInfo const& rhs ) const
11960 {
11961 return !operator==( rhs );
11962 }
11963
11964 private:
11965 StructureType sType;
11966
11967 public:
11968 const void* pNext;
11969 QueryPoolCreateFlags flags;
11970 QueryType queryType;
11971 uint32_t queryCount;
11972 QueryPipelineStatisticFlags pipelineStatistics;
11973 };
11974 static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
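// Illustrative usage sketch (not part of the generated interface): a small timestamp query pool.
// Assumes a vk::Device `device` created elsewhere; createQueryPool is the enhanced-mode wrapper
// declared elsewhere in this header.
//
//   vk::QueryPoolCreateInfo queryPoolInfo( vk::QueryPoolCreateFlags(), vk::QueryType::eTimestamp, 2 );
//   vk::QueryPool queryPool = device.createQueryPool( queryPoolInfo );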
11975
11976 enum class ImageAspectFlagBits
11977 {
11978 eColor = VK_IMAGE_ASPECT_COLOR_BIT,
11979 eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
11980 eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
11981 eMetadata = VK_IMAGE_ASPECT_METADATA_BIT
11982 };
11983
11984 using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
11985
11986 VULKAN_HPP_INLINE ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 )
11987 {
11988 return ImageAspectFlags( bit0 ) | bit1;
11989 }
11990
11991 VULKAN_HPP_INLINE ImageAspectFlags operator~( ImageAspectFlagBits bits )
11992 {
11993 return ~( ImageAspectFlags( bits ) );
11994 }
11995
11996 template <> struct FlagTraits<ImageAspectFlagBits>
11997 {
11998 enum
11999 {
12000 allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata)
12001 };
12002 };
12003
12004 struct ImageSubresource
12005 {
12006 ImageSubresource( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t arrayLayer_ = 0 )
12007 : aspectMask( aspectMask_ )
12008 , mipLevel( mipLevel_ )
12009 , arrayLayer( arrayLayer_ )
12010 {
12011 }
12012
12013 ImageSubresource( VkImageSubresource const & rhs )
12014 {
12015 memcpy( this, &rhs, sizeof(ImageSubresource) );
12016 }
12017
12018 ImageSubresource& operator=( VkImageSubresource const & rhs )
12019 {
12020 memcpy( this, &rhs, sizeof(ImageSubresource) );
12021 return *this;
12022 }
12023
12024 ImageSubresource& setAspectMask( ImageAspectFlags aspectMask_ )
12025 {
12026 aspectMask = aspectMask_;
12027 return *this;
12028 }
12029
12030 ImageSubresource& setMipLevel( uint32_t mipLevel_ )
12031 {
12032 mipLevel = mipLevel_;
12033 return *this;
12034 }
12035
12036 ImageSubresource& setArrayLayer( uint32_t arrayLayer_ )
12037 {
12038 arrayLayer = arrayLayer_;
12039 return *this;
12040 }
12041
12042 operator const VkImageSubresource&() const
12043 {
12044 return *reinterpret_cast<const VkImageSubresource*>(this);
12045 }
12046
12047 bool operator==( ImageSubresource const& rhs ) const
12048 {
12049 return ( aspectMask == rhs.aspectMask )
12050 && ( mipLevel == rhs.mipLevel )
12051 && ( arrayLayer == rhs.arrayLayer );
12052 }
12053
12054 bool operator!=( ImageSubresource const& rhs ) const
12055 {
12056 return !operator==( rhs );
12057 }
12058
12059 ImageAspectFlags aspectMask;
12060 uint32_t mipLevel;
12061 uint32_t arrayLayer;
12062 };
12063 static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
12064
12065 struct ImageSubresourceLayers
12066 {
12067 ImageSubresourceLayers( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t mipLevel_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
12068 : aspectMask( aspectMask_ )
12069 , mipLevel( mipLevel_ )
12070 , baseArrayLayer( baseArrayLayer_ )
12071 , layerCount( layerCount_ )
12072 {
12073 }
12074
12075 ImageSubresourceLayers( VkImageSubresourceLayers const & rhs )
12076 {
12077 memcpy( this, &rhs, sizeof(ImageSubresourceLayers) );
12078 }
12079
12080 ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs )
12081 {
12082 memcpy( this, &rhs, sizeof(ImageSubresourceLayers) );
12083 return *this;
12084 }
12085
12086 ImageSubresourceLayers& setAspectMask( ImageAspectFlags aspectMask_ )
12087 {
12088 aspectMask = aspectMask_;
12089 return *this;
12090 }
12091
12092 ImageSubresourceLayers& setMipLevel( uint32_t mipLevel_ )
12093 {
12094 mipLevel = mipLevel_;
12095 return *this;
12096 }
12097
12098 ImageSubresourceLayers& setBaseArrayLayer( uint32_t baseArrayLayer_ )
12099 {
12100 baseArrayLayer = baseArrayLayer_;
12101 return *this;
12102 }
12103
12104 ImageSubresourceLayers& setLayerCount( uint32_t layerCount_ )
12105 {
12106 layerCount = layerCount_;
12107 return *this;
12108 }
12109
12110 operator const VkImageSubresourceLayers&() const
12111 {
12112 return *reinterpret_cast<const VkImageSubresourceLayers*>(this);
12113 }
12114
12115 bool operator==( ImageSubresourceLayers const& rhs ) const
12116 {
12117 return ( aspectMask == rhs.aspectMask )
12118 && ( mipLevel == rhs.mipLevel )
12119 && ( baseArrayLayer == rhs.baseArrayLayer )
12120 && ( layerCount == rhs.layerCount );
12121 }
12122
12123 bool operator!=( ImageSubresourceLayers const& rhs ) const
12124 {
12125 return !operator==( rhs );
12126 }
12127
12128 ImageAspectFlags aspectMask;
12129 uint32_t mipLevel;
12130 uint32_t baseArrayLayer;
12131 uint32_t layerCount;
12132 };
12133 static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
12134
12135 struct ImageSubresourceRange
12136 {
12137 ImageSubresourceRange( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t baseMipLevel_ = 0, uint32_t levelCount_ = 0, uint32_t baseArrayLayer_ = 0, uint32_t layerCount_ = 0 )
12138 : aspectMask( aspectMask_ )
12139 , baseMipLevel( baseMipLevel_ )
12140 , levelCount( levelCount_ )
12141 , baseArrayLayer( baseArrayLayer_ )
12142 , layerCount( layerCount_ )
12143 {
12144 }
12145
12146 ImageSubresourceRange( VkImageSubresourceRange const & rhs )
12147 {
12148 memcpy( this, &rhs, sizeof(ImageSubresourceRange) );
12149 }
12150
12151 ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs )
12152 {
12153 memcpy( this, &rhs, sizeof(ImageSubresourceRange) );
12154 return *this;
12155 }
12156
12157 ImageSubresourceRange& setAspectMask( ImageAspectFlags aspectMask_ )
12158 {
12159 aspectMask = aspectMask_;
12160 return *this;
12161 }
12162
12163 ImageSubresourceRange& setBaseMipLevel( uint32_t baseMipLevel_ )
12164 {
12165 baseMipLevel = baseMipLevel_;
12166 return *this;
12167 }
12168
12169 ImageSubresourceRange& setLevelCount( uint32_t levelCount_ )
12170 {
12171 levelCount = levelCount_;
12172 return *this;
12173 }
12174
12175 ImageSubresourceRange& setBaseArrayLayer( uint32_t baseArrayLayer_ )
12176 {
12177 baseArrayLayer = baseArrayLayer_;
12178 return *this;
12179 }
12180
12181 ImageSubresourceRange& setLayerCount( uint32_t layerCount_ )
12182 {
12183 layerCount = layerCount_;
12184 return *this;
12185 }
12186
12187 operator const VkImageSubresourceRange&() const
12188 {
12189 return *reinterpret_cast<const VkImageSubresourceRange*>(this);
12190 }
12191
12192 bool operator==( ImageSubresourceRange const& rhs ) const
12193 {
12194 return ( aspectMask == rhs.aspectMask )
12195 && ( baseMipLevel == rhs.baseMipLevel )
12196 && ( levelCount == rhs.levelCount )
12197 && ( baseArrayLayer == rhs.baseArrayLayer )
12198 && ( layerCount == rhs.layerCount );
12199 }
12200
12201 bool operator!=( ImageSubresourceRange const& rhs ) const
12202 {
12203 return !operator==( rhs );
12204 }
12205
12206 ImageAspectFlags aspectMask;
12207 uint32_t baseMipLevel;
12208 uint32_t levelCount;
12209 uint32_t baseArrayLayer;
12210 uint32_t layerCount;
12211 };
12212 static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
12213
12214 struct ImageMemoryBarrier
12215 {
12216 ImageMemoryBarrier( AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = 0, uint32_t dstQueueFamilyIndex_ = 0, Image image_ = Image(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
12217 : sType( StructureType::eImageMemoryBarrier )
12218 , pNext( nullptr )
12219 , srcAccessMask( srcAccessMask_ )
12220 , dstAccessMask( dstAccessMask_ )
12221 , oldLayout( oldLayout_ )
12222 , newLayout( newLayout_ )
12223 , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
12224 , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
12225 , image( image_ )
12226 , subresourceRange( subresourceRange_ )
12227 {
12228 }
12229
12230 ImageMemoryBarrier( VkImageMemoryBarrier const & rhs )
12231 {
12232 memcpy( this, &rhs, sizeof(ImageMemoryBarrier) );
12233 }
12234
12235 ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs )
12236 {
12237 memcpy( this, &rhs, sizeof(ImageMemoryBarrier) );
12238 return *this;
12239 }
12240
12241 ImageMemoryBarrier& setPNext( const void* pNext_ )
12242 {
12243 pNext = pNext_;
12244 return *this;
12245 }
12246
12247 ImageMemoryBarrier& setSrcAccessMask( AccessFlags srcAccessMask_ )
12248 {
12249 srcAccessMask = srcAccessMask_;
12250 return *this;
12251 }
12252
12253 ImageMemoryBarrier& setDstAccessMask( AccessFlags dstAccessMask_ )
12254 {
12255 dstAccessMask = dstAccessMask_;
12256 return *this;
12257 }
12258
12259 ImageMemoryBarrier& setOldLayout( ImageLayout oldLayout_ )
12260 {
12261 oldLayout = oldLayout_;
12262 return *this;
12263 }
12264
12265 ImageMemoryBarrier& setNewLayout( ImageLayout newLayout_ )
12266 {
12267 newLayout = newLayout_;
12268 return *this;
12269 }
12270
12271 ImageMemoryBarrier& setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ )
12272 {
12273 srcQueueFamilyIndex = srcQueueFamilyIndex_;
12274 return *this;
12275 }
12276
12277 ImageMemoryBarrier& setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ )
12278 {
12279 dstQueueFamilyIndex = dstQueueFamilyIndex_;
12280 return *this;
12281 }
12282
12283 ImageMemoryBarrier& setImage( Image image_ )
12284 {
12285 image = image_;
12286 return *this;
12287 }
12288
12289 ImageMemoryBarrier& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
12290 {
12291 subresourceRange = subresourceRange_;
12292 return *this;
12293 }
12294
12295 operator const VkImageMemoryBarrier&() const
12296 {
12297 return *reinterpret_cast<const VkImageMemoryBarrier*>(this);
12298 }
12299
12300 bool operator==( ImageMemoryBarrier const& rhs ) const
12301 {
12302 return ( sType == rhs.sType )
12303 && ( pNext == rhs.pNext )
12304 && ( srcAccessMask == rhs.srcAccessMask )
12305 && ( dstAccessMask == rhs.dstAccessMask )
12306 && ( oldLayout == rhs.oldLayout )
12307 && ( newLayout == rhs.newLayout )
12308 && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
12309 && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
12310 && ( image == rhs.image )
12311 && ( subresourceRange == rhs.subresourceRange );
12312 }
12313
12314 bool operator!=( ImageMemoryBarrier const& rhs ) const
12315 {
12316 return !operator==( rhs );
12317 }
12318
12319 private:
12320 StructureType sType;
12321
12322 public:
12323 const void* pNext;
12324 AccessFlags srcAccessMask;
12325 AccessFlags dstAccessMask;
12326 ImageLayout oldLayout;
12327 ImageLayout newLayout;
12328 uint32_t srcQueueFamilyIndex;
12329 uint32_t dstQueueFamilyIndex;
12330 Image image;
12331 ImageSubresourceRange subresourceRange;
12332 };
12333 static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
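// Illustrative usage sketch (not part of the generated interface): a barrier transitioning a
// freshly created image into a layout suitable for transfer writes. Assumes a vk::Image `image`
// created elsewhere; VK_QUEUE_FAMILY_IGNORED comes from vulkan.h.
//
//   vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
//     .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
//     .setOldLayout( vk::ImageLayout::eUndefined )
//     .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
//     .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//     .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//     .setImage( image )
//     .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );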
12334
12335 struct ImageViewCreateInfo
12336 {
12337 ImageViewCreateInfo( ImageViewCreateFlags flags_ = ImageViewCreateFlags(), Image image_ = Image(), ImageViewType viewType_ = ImageViewType::e1D, Format format_ = Format::eUndefined, ComponentMapping components_ = ComponentMapping(), ImageSubresourceRange subresourceRange_ = ImageSubresourceRange() )
12338 : sType( StructureType::eImageViewCreateInfo )
12339 , pNext( nullptr )
12340 , flags( flags_ )
12341 , image( image_ )
12342 , viewType( viewType_ )
12343 , format( format_ )
12344 , components( components_ )
12345 , subresourceRange( subresourceRange_ )
12346 {
12347 }
12348
12349 ImageViewCreateInfo( VkImageViewCreateInfo const & rhs )
12350 {
12351 memcpy( this, &rhs, sizeof(ImageViewCreateInfo) );
12352 }
12353
12354 ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs )
12355 {
12356 memcpy( this, &rhs, sizeof(ImageViewCreateInfo) );
12357 return *this;
12358 }
12359
12360 ImageViewCreateInfo& setPNext( const void* pNext_ )
12361 {
12362 pNext = pNext_;
12363 return *this;
12364 }
12365
12366 ImageViewCreateInfo& setFlags( ImageViewCreateFlags flags_ )
12367 {
12368 flags = flags_;
12369 return *this;
12370 }
12371
12372 ImageViewCreateInfo& setImage( Image image_ )
12373 {
12374 image = image_;
12375 return *this;
12376 }
12377
12378 ImageViewCreateInfo& setViewType( ImageViewType viewType_ )
12379 {
12380 viewType = viewType_;
12381 return *this;
12382 }
12383
12384 ImageViewCreateInfo& setFormat( Format format_ )
12385 {
12386 format = format_;
12387 return *this;
12388 }
12389
12390 ImageViewCreateInfo& setComponents( ComponentMapping components_ )
12391 {
12392 components = components_;
12393 return *this;
12394 }
12395
12396 ImageViewCreateInfo& setSubresourceRange( ImageSubresourceRange subresourceRange_ )
12397 {
12398 subresourceRange = subresourceRange_;
12399 return *this;
12400 }
12401
12402 operator const VkImageViewCreateInfo&() const
12403 {
12404 return *reinterpret_cast<const VkImageViewCreateInfo*>(this);
12405 }
12406
12407 bool operator==( ImageViewCreateInfo const& rhs ) const
12408 {
12409 return ( sType == rhs.sType )
12410 && ( pNext == rhs.pNext )
12411 && ( flags == rhs.flags )
12412 && ( image == rhs.image )
12413 && ( viewType == rhs.viewType )
12414 && ( format == rhs.format )
12415 && ( components == rhs.components )
12416 && ( subresourceRange == rhs.subresourceRange );
12417 }
12418
12419 bool operator!=( ImageViewCreateInfo const& rhs ) const
12420 {
12421 return !operator==( rhs );
12422 }
12423
12424 private:
12425 StructureType sType;
12426
12427 public:
12428 const void* pNext;
12429 ImageViewCreateFlags flags;
12430 Image image;
12431 ImageViewType viewType;
12432 Format format;
12433 ComponentMapping components;
12434 ImageSubresourceRange subresourceRange;
12435 };
12436 static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
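// Illustrative usage sketch (not part of the generated interface): a 2D color view over the
// first mip level and array layer of an image. Assumes a vk::Image `image` and a vk::Device
// `device` created elsewhere; createImageView is the enhanced-mode wrapper declared elsewhere
// in this header.
//
//   vk::ImageViewCreateInfo viewInfo = vk::ImageViewCreateInfo()
//     .setImage( image )
//     .setViewType( vk::ImageViewType::e2D )
//     .setFormat( vk::Format::eB8G8R8A8Unorm )
//     .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
//   vk::ImageView imageView = device.createImageView( viewInfo );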
12437
12438 struct ImageCopy
12439 {
12440 ImageCopy( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
12441 : srcSubresource( srcSubresource_ )
12442 , srcOffset( srcOffset_ )
12443 , dstSubresource( dstSubresource_ )
12444 , dstOffset( dstOffset_ )
12445 , extent( extent_ )
12446 {
12447 }
12448
12449 ImageCopy( VkImageCopy const & rhs )
12450 {
12451 memcpy( this, &rhs, sizeof(ImageCopy) );
12452 }
12453
12454 ImageCopy& operator=( VkImageCopy const & rhs )
12455 {
12456 memcpy( this, &rhs, sizeof(ImageCopy) );
12457 return *this;
12458 }
12459
12460 ImageCopy& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12461 {
12462 srcSubresource = srcSubresource_;
12463 return *this;
12464 }
12465
12466 ImageCopy& setSrcOffset( Offset3D srcOffset_ )
12467 {
12468 srcOffset = srcOffset_;
12469 return *this;
12470 }
12471
12472 ImageCopy& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12473 {
12474 dstSubresource = dstSubresource_;
12475 return *this;
12476 }
12477
12478 ImageCopy& setDstOffset( Offset3D dstOffset_ )
12479 {
12480 dstOffset = dstOffset_;
12481 return *this;
12482 }
12483
12484 ImageCopy& setExtent( Extent3D extent_ )
12485 {
12486 extent = extent_;
12487 return *this;
12488 }
12489
12490 operator const VkImageCopy&() const
12491 {
12492 return *reinterpret_cast<const VkImageCopy*>(this);
12493 }
12494
12495 bool operator==( ImageCopy const& rhs ) const
12496 {
12497 return ( srcSubresource == rhs.srcSubresource )
12498 && ( srcOffset == rhs.srcOffset )
12499 && ( dstSubresource == rhs.dstSubresource )
12500 && ( dstOffset == rhs.dstOffset )
12501 && ( extent == rhs.extent );
12502 }
12503
12504 bool operator!=( ImageCopy const& rhs ) const
12505 {
12506 return !operator==( rhs );
12507 }
12508
12509 ImageSubresourceLayers srcSubresource;
12510 Offset3D srcOffset;
12511 ImageSubresourceLayers dstSubresource;
12512 Offset3D dstOffset;
12513 Extent3D extent;
12514 };
12515 static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
12516
12517 struct ImageBlit
12518 {
12519 ImageBlit( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& srcOffsets_ = { { Offset3D(), Offset3D() } }, ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), std::array<Offset3D,2> const& dstOffsets_ = { { Offset3D(), Offset3D() } } )
12520 : srcSubresource( srcSubresource_ )
12521 , dstSubresource( dstSubresource_ )
12522 {
12523 memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
12524 memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
12525 }
12526
12527 ImageBlit( VkImageBlit const & rhs )
12528 {
12529 memcpy( this, &rhs, sizeof(ImageBlit) );
12530 }
12531
12532 ImageBlit& operator=( VkImageBlit const & rhs )
12533 {
12534 memcpy( this, &rhs, sizeof(ImageBlit) );
12535 return *this;
12536 }
12537
12538 ImageBlit& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12539 {
12540 srcSubresource = srcSubresource_;
12541 return *this;
12542 }
12543
12544 ImageBlit& setSrcOffsets( std::array<Offset3D,2> srcOffsets_ )
12545 {
12546 memcpy( &srcOffsets, srcOffsets_.data(), 2 * sizeof( Offset3D ) );
12547 return *this;
12548 }
12549
12550 ImageBlit& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12551 {
12552 dstSubresource = dstSubresource_;
12553 return *this;
12554 }
12555
12556 ImageBlit& setDstOffsets( std::array<Offset3D,2> dstOffsets_ )
12557 {
12558 memcpy( &dstOffsets, dstOffsets_.data(), 2 * sizeof( Offset3D ) );
12559 return *this;
12560 }
12561
12562 operator const VkImageBlit&() const
12563 {
12564 return *reinterpret_cast<const VkImageBlit*>(this);
12565 }
12566
12567 bool operator==( ImageBlit const& rhs ) const
12568 {
12569 return ( srcSubresource == rhs.srcSubresource )
12570 && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( Offset3D ) ) == 0 )
12571 && ( dstSubresource == rhs.dstSubresource )
12572 && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( Offset3D ) ) == 0 );
12573 }
12574
12575 bool operator!=( ImageBlit const& rhs ) const
12576 {
12577 return !operator==( rhs );
12578 }
12579
12580 ImageSubresourceLayers srcSubresource;
12581 Offset3D srcOffsets[2];
12582 ImageSubresourceLayers dstSubresource;
12583 Offset3D dstOffsets[2];
12584 };
12585 static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
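  // Usage sketch (illustrative): an ImageBlit describes corresponding regions by their {min, max}
  // corner offsets, here downsampling mip 0 into mip 1. "cmd", "srcImage", "dstImage" and the
  // width/height values are assumptions for the example.
  //
  //   vk::ImageBlit blit = vk::ImageBlit()
  //     .setSrcSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
  //     .setSrcOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( width, height, 1 ) } } )
  //     .setDstSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 1, 0, 1 ) )
  //     .setDstOffsets( { { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( width / 2, height / 2, 1 ) } } );
  //   cmd.blitImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                  dstImage, vk::ImageLayout::eTransferDstOptimal,
  //                  blit, vk::Filter::eLinear );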
12586
12587 struct BufferImageCopy
12588 {
12589 BufferImageCopy( DeviceSize bufferOffset_ = 0, uint32_t bufferRowLength_ = 0, uint32_t bufferImageHeight_ = 0, ImageSubresourceLayers imageSubresource_ = ImageSubresourceLayers(), Offset3D imageOffset_ = Offset3D(), Extent3D imageExtent_ = Extent3D() )
12590 : bufferOffset( bufferOffset_ )
12591 , bufferRowLength( bufferRowLength_ )
12592 , bufferImageHeight( bufferImageHeight_ )
12593 , imageSubresource( imageSubresource_ )
12594 , imageOffset( imageOffset_ )
12595 , imageExtent( imageExtent_ )
12596 {
12597 }
12598
12599 BufferImageCopy( VkBufferImageCopy const & rhs )
12600 {
12601 memcpy( this, &rhs, sizeof(BufferImageCopy) );
12602 }
12603
12604 BufferImageCopy& operator=( VkBufferImageCopy const & rhs )
12605 {
12606 memcpy( this, &rhs, sizeof(BufferImageCopy) );
12607 return *this;
12608 }
12609
12610 BufferImageCopy& setBufferOffset( DeviceSize bufferOffset_ )
12611 {
12612 bufferOffset = bufferOffset_;
12613 return *this;
12614 }
12615
12616 BufferImageCopy& setBufferRowLength( uint32_t bufferRowLength_ )
12617 {
12618 bufferRowLength = bufferRowLength_;
12619 return *this;
12620 }
12621
12622 BufferImageCopy& setBufferImageHeight( uint32_t bufferImageHeight_ )
12623 {
12624 bufferImageHeight = bufferImageHeight_;
12625 return *this;
12626 }
12627
12628 BufferImageCopy& setImageSubresource( ImageSubresourceLayers imageSubresource_ )
12629 {
12630 imageSubresource = imageSubresource_;
12631 return *this;
12632 }
12633
12634 BufferImageCopy& setImageOffset( Offset3D imageOffset_ )
12635 {
12636 imageOffset = imageOffset_;
12637 return *this;
12638 }
12639
12640 BufferImageCopy& setImageExtent( Extent3D imageExtent_ )
12641 {
12642 imageExtent = imageExtent_;
12643 return *this;
12644 }
12645
12646 operator const VkBufferImageCopy&() const
12647 {
12648 return *reinterpret_cast<const VkBufferImageCopy*>(this);
12649 }
12650
12651 bool operator==( BufferImageCopy const& rhs ) const
12652 {
12653 return ( bufferOffset == rhs.bufferOffset )
12654 && ( bufferRowLength == rhs.bufferRowLength )
12655 && ( bufferImageHeight == rhs.bufferImageHeight )
12656 && ( imageSubresource == rhs.imageSubresource )
12657 && ( imageOffset == rhs.imageOffset )
12658 && ( imageExtent == rhs.imageExtent );
12659 }
12660
12661 bool operator!=( BufferImageCopy const& rhs ) const
12662 {
12663 return !operator==( rhs );
12664 }
12665
12666 DeviceSize bufferOffset;
12667 uint32_t bufferRowLength;
12668 uint32_t bufferImageHeight;
12669 ImageSubresourceLayers imageSubresource;
12670 Offset3D imageOffset;
12671 Extent3D imageExtent;
12672 };
12673 static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
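  // Usage sketch (illustrative): a tightly packed staging-buffer upload of the first mip level;
  // bufferRowLength and bufferImageHeight of zero mean "tightly packed". "cmd", "stagingBuffer",
  // "texture", "width" and "height" are assumptions for the example.
  //
  //   vk::BufferImageCopy region = vk::BufferImageCopy()
  //     .setBufferOffset( 0 )
  //     .setImageSubresource( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
  //     .setImageOffset( vk::Offset3D( 0, 0, 0 ) )
  //     .setImageExtent( vk::Extent3D( width, height, 1 ) );
  //   cmd.copyBufferToImage( stagingBuffer, texture, vk::ImageLayout::eTransferDstOptimal, region );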
12674
12675 struct ImageResolve
12676 {
12677 ImageResolve( ImageSubresourceLayers srcSubresource_ = ImageSubresourceLayers(), Offset3D srcOffset_ = Offset3D(), ImageSubresourceLayers dstSubresource_ = ImageSubresourceLayers(), Offset3D dstOffset_ = Offset3D(), Extent3D extent_ = Extent3D() )
12678 : srcSubresource( srcSubresource_ )
12679 , srcOffset( srcOffset_ )
12680 , dstSubresource( dstSubresource_ )
12681 , dstOffset( dstOffset_ )
12682 , extent( extent_ )
12683 {
12684 }
12685
12686 ImageResolve( VkImageResolve const & rhs )
12687 {
12688 memcpy( this, &rhs, sizeof(ImageResolve) );
12689 }
12690
12691 ImageResolve& operator=( VkImageResolve const & rhs )
12692 {
12693 memcpy( this, &rhs, sizeof(ImageResolve) );
12694 return *this;
12695 }
12696
12697 ImageResolve& setSrcSubresource( ImageSubresourceLayers srcSubresource_ )
12698 {
12699 srcSubresource = srcSubresource_;
12700 return *this;
12701 }
12702
12703 ImageResolve& setSrcOffset( Offset3D srcOffset_ )
12704 {
12705 srcOffset = srcOffset_;
12706 return *this;
12707 }
12708
12709 ImageResolve& setDstSubresource( ImageSubresourceLayers dstSubresource_ )
12710 {
12711 dstSubresource = dstSubresource_;
12712 return *this;
12713 }
12714
12715 ImageResolve& setDstOffset( Offset3D dstOffset_ )
12716 {
12717 dstOffset = dstOffset_;
12718 return *this;
12719 }
12720
12721 ImageResolve& setExtent( Extent3D extent_ )
12722 {
12723 extent = extent_;
12724 return *this;
12725 }
12726
12727 operator const VkImageResolve&() const
12728 {
12729 return *reinterpret_cast<const VkImageResolve*>(this);
12730 }
12731
12732 bool operator==( ImageResolve const& rhs ) const
12733 {
12734 return ( srcSubresource == rhs.srcSubresource )
12735 && ( srcOffset == rhs.srcOffset )
12736 && ( dstSubresource == rhs.dstSubresource )
12737 && ( dstOffset == rhs.dstOffset )
12738 && ( extent == rhs.extent );
12739 }
12740
12741 bool operator!=( ImageResolve const& rhs ) const
12742 {
12743 return !operator==( rhs );
12744 }
12745
12746 ImageSubresourceLayers srcSubresource;
12747 Offset3D srcOffset;
12748 ImageSubresourceLayers dstSubresource;
12749 Offset3D dstOffset;
12750 Extent3D extent;
12751 };
12752 static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
12753
12754 struct ClearAttachment
12755 {
12756 ClearAttachment( ImageAspectFlags aspectMask_ = ImageAspectFlags(), uint32_t colorAttachment_ = 0, ClearValue clearValue_ = ClearValue() )
12757 : aspectMask( aspectMask_ )
12758 , colorAttachment( colorAttachment_ )
12759 , clearValue( clearValue_ )
12760 {
12761 }
12762
12763 ClearAttachment( VkClearAttachment const & rhs )
12764 {
12765 memcpy( this, &rhs, sizeof(ClearAttachment) );
12766 }
12767
12768 ClearAttachment& operator=( VkClearAttachment const & rhs )
12769 {
12770 memcpy( this, &rhs, sizeof(ClearAttachment) );
12771 return *this;
12772 }
12773
12774 ClearAttachment& setAspectMask( ImageAspectFlags aspectMask_ )
12775 {
12776 aspectMask = aspectMask_;
12777 return *this;
12778 }
12779
12780 ClearAttachment& setColorAttachment( uint32_t colorAttachment_ )
12781 {
12782 colorAttachment = colorAttachment_;
12783 return *this;
12784 }
12785
12786 ClearAttachment& setClearValue( ClearValue clearValue_ )
12787 {
12788 clearValue = clearValue_;
12789 return *this;
12790 }
12791
12792 operator const VkClearAttachment&() const
12793 {
12794 return *reinterpret_cast<const VkClearAttachment*>(this);
12795 }
12796
12797 ImageAspectFlags aspectMask;
12798 uint32_t colorAttachment;
12799 ClearValue clearValue;
12800 };
12801 static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
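  // Usage sketch (illustrative): clearing color attachment 0 inside a render pass instance.
  // ClearRect and ClearColorValue are defined elsewhere in this header; "cmd" and
  // "framebufferExtent" are assumptions for the example.
  //
  //   vk::ClearAttachment clear = vk::ClearAttachment()
  //     .setAspectMask( vk::ImageAspectFlagBits::eColor )
  //     .setColorAttachment( 0 )
  //     .setClearValue( vk::ClearColorValue( std::array<float,4>{ { 0.0f, 0.0f, 0.0f, 1.0f } } ) );
  //   vk::ClearRect rect( vk::Rect2D( vk::Offset2D( 0, 0 ), framebufferExtent ), 0, 1 );
  //   cmd.clearAttachments( clear, rect );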
12802
12803 enum class SparseImageFormatFlagBits
12804 {
12805 eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
12806 eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
12807 eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
12808 };
12809
12810 using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits, VkSparseImageFormatFlags>;
12811
12812 VULKAN_HPP_INLINE SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 )
12813 {
12814 return SparseImageFormatFlags( bit0 ) | bit1;
12815 }
12816
12817 VULKAN_HPP_INLINE SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits )
12818 {
12819 return ~( SparseImageFormatFlags( bits ) );
12820 }
12821
12822 template <> struct FlagTraits<SparseImageFormatFlagBits>
12823 {
12824 enum
12825 {
12826 allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
12827 };
12828 };
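  // Usage sketch (illustrative): the generated operator| / operator~ and FlagTraits keep flag
  // composition type-safe, so mixing unrelated flag bits fails to compile.
  //
  //   vk::SparseImageFormatFlags sparseFlags = vk::SparseImageFormatFlagBits::eSingleMiptail
  //                                          | vk::SparseImageFormatFlagBits::eAlignedMipSize;
  //   bool hasMiptail = !!( sparseFlags & vk::SparseImageFormatFlagBits::eSingleMiptail );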
12829
12830 struct SparseImageFormatProperties
12831 {
12832 operator const VkSparseImageFormatProperties&() const
12833 {
12834 return *reinterpret_cast<const VkSparseImageFormatProperties*>(this);
12835 }
12836
12837 bool operator==( SparseImageFormatProperties const& rhs ) const
12838 {
12839 return ( aspectMask == rhs.aspectMask )
12840 && ( imageGranularity == rhs.imageGranularity )
12841 && ( flags == rhs.flags );
12842 }
12843
12844 bool operator!=( SparseImageFormatProperties const& rhs ) const
12845 {
12846 return !operator==( rhs );
12847 }
12848
12849 ImageAspectFlags aspectMask;
12850 Extent3D imageGranularity;
12851 SparseImageFormatFlags flags;
12852 };
12853 static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
12854
12855 struct SparseImageMemoryRequirements
12856 {
12857 operator const VkSparseImageMemoryRequirements&() const
12858 {
12859 return *reinterpret_cast<const VkSparseImageMemoryRequirements*>(this);
12860 }
12861
12862 bool operator==( SparseImageMemoryRequirements const& rhs ) const
12863 {
12864 return ( formatProperties == rhs.formatProperties )
12865 && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
12866 && ( imageMipTailSize == rhs.imageMipTailSize )
12867 && ( imageMipTailOffset == rhs.imageMipTailOffset )
12868 && ( imageMipTailStride == rhs.imageMipTailStride );
12869 }
12870
12871 bool operator!=( SparseImageMemoryRequirements const& rhs ) const
12872 {
12873 return !operator==( rhs );
12874 }
12875
12876 SparseImageFormatProperties formatProperties;
12877 uint32_t imageMipTailFirstLod;
12878 DeviceSize imageMipTailSize;
12879 DeviceSize imageMipTailOffset;
12880 DeviceSize imageMipTailStride;
12881 };
12882 static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
12883
12884 struct SparseImageFormatProperties2KHR
12885 {
12886 operator const VkSparseImageFormatProperties2KHR&() const
12887 {
12888 return *reinterpret_cast<const VkSparseImageFormatProperties2KHR*>(this);
12889 }
12890
12891 bool operator==( SparseImageFormatProperties2KHR const& rhs ) const
12892 {
12893 return ( sType == rhs.sType )
12894 && ( pNext == rhs.pNext )
12895 && ( properties == rhs.properties );
12896 }
12897
12898 bool operator!=( SparseImageFormatProperties2KHR const& rhs ) const
12899 {
12900 return !operator==( rhs );
12901 }
12902
12903 private:
12904 StructureType sType;
12905
12906 public:
12907 void* pNext;
12908 SparseImageFormatProperties properties;
12909 };
12910 static_assert( sizeof( SparseImageFormatProperties2KHR ) == sizeof( VkSparseImageFormatProperties2KHR ), "struct and wrapper have different size!" );
12911
12912 enum class SparseMemoryBindFlagBits
12913 {
12914 eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
12915 };
12916
12917 using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits, VkSparseMemoryBindFlags>;
12918
12919 VULKAN_HPP_INLINE SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 )
12920 {
12921 return SparseMemoryBindFlags( bit0 ) | bit1;
12922 }
12923
12924 VULKAN_HPP_INLINE SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits )
12925 {
12926 return ~( SparseMemoryBindFlags( bits ) );
12927 }
12928
12929 template <> struct FlagTraits<SparseMemoryBindFlagBits>
12930 {
12931 enum
12932 {
12933 allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
12934 };
12935 };
12936
12937 struct SparseMemoryBind
12938 {
12939 SparseMemoryBind( DeviceSize resourceOffset_ = 0, DeviceSize size_ = 0, DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
12940 : resourceOffset( resourceOffset_ )
12941 , size( size_ )
12942 , memory( memory_ )
12943 , memoryOffset( memoryOffset_ )
12944 , flags( flags_ )
12945 {
12946 }
12947
12948 SparseMemoryBind( VkSparseMemoryBind const & rhs )
12949 {
12950 memcpy( this, &rhs, sizeof(SparseMemoryBind) );
12951 }
12952
12953 SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs )
12954 {
12955 memcpy( this, &rhs, sizeof(SparseMemoryBind) );
12956 return *this;
12957 }
12958
12959 SparseMemoryBind& setResourceOffset( DeviceSize resourceOffset_ )
12960 {
12961 resourceOffset = resourceOffset_;
12962 return *this;
12963 }
12964
12965 SparseMemoryBind& setSize( DeviceSize size_ )
12966 {
12967 size = size_;
12968 return *this;
12969 }
12970
12971 SparseMemoryBind& setMemory( DeviceMemory memory_ )
12972 {
12973 memory = memory_;
12974 return *this;
12975 }
12976
12977 SparseMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
12978 {
12979 memoryOffset = memoryOffset_;
12980 return *this;
12981 }
12982
12983 SparseMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
12984 {
12985 flags = flags_;
12986 return *this;
12987 }
12988
12989 operator const VkSparseMemoryBind&() const
12990 {
12991 return *reinterpret_cast<const VkSparseMemoryBind*>(this);
12992 }
12993
12994 bool operator==( SparseMemoryBind const& rhs ) const
12995 {
12996 return ( resourceOffset == rhs.resourceOffset )
12997 && ( size == rhs.size )
12998 && ( memory == rhs.memory )
12999 && ( memoryOffset == rhs.memoryOffset )
13000 && ( flags == rhs.flags );
13001 }
13002
13003 bool operator!=( SparseMemoryBind const& rhs ) const
13004 {
13005 return !operator==( rhs );
13006 }
13007
13008 DeviceSize resourceOffset;
13009 DeviceSize size;
13010 DeviceMemory memory;
13011 DeviceSize memoryOffset;
13012 SparseMemoryBindFlags flags;
13013 };
13014 static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
13015
13016 struct SparseImageMemoryBind
13017 {
13018 SparseImageMemoryBind( ImageSubresource subresource_ = ImageSubresource(), Offset3D offset_ = Offset3D(), Extent3D extent_ = Extent3D(), DeviceMemory memory_ = DeviceMemory(), DeviceSize memoryOffset_ = 0, SparseMemoryBindFlags flags_ = SparseMemoryBindFlags() )
13019 : subresource( subresource_ )
13020 , offset( offset_ )
13021 , extent( extent_ )
13022 , memory( memory_ )
13023 , memoryOffset( memoryOffset_ )
13024 , flags( flags_ )
13025 {
13026 }
13027
13028 SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs )
13029 {
13030 memcpy( this, &rhs, sizeof(SparseImageMemoryBind) );
13031 }
13032
13033 SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs )
13034 {
13035 memcpy( this, &rhs, sizeof(SparseImageMemoryBind) );
13036 return *this;
13037 }
13038
13039 SparseImageMemoryBind& setSubresource( ImageSubresource subresource_ )
13040 {
13041 subresource = subresource_;
13042 return *this;
13043 }
13044
13045 SparseImageMemoryBind& setOffset( Offset3D offset_ )
13046 {
13047 offset = offset_;
13048 return *this;
13049 }
13050
13051 SparseImageMemoryBind& setExtent( Extent3D extent_ )
13052 {
13053 extent = extent_;
13054 return *this;
13055 }
13056
13057 SparseImageMemoryBind& setMemory( DeviceMemory memory_ )
13058 {
13059 memory = memory_;
13060 return *this;
13061 }
13062
13063 SparseImageMemoryBind& setMemoryOffset( DeviceSize memoryOffset_ )
13064 {
13065 memoryOffset = memoryOffset_;
13066 return *this;
13067 }
13068
13069 SparseImageMemoryBind& setFlags( SparseMemoryBindFlags flags_ )
13070 {
13071 flags = flags_;
13072 return *this;
13073 }
13074
13075 operator const VkSparseImageMemoryBind&() const
13076 {
13077 return *reinterpret_cast<const VkSparseImageMemoryBind*>(this);
13078 }
13079
13080 bool operator==( SparseImageMemoryBind const& rhs ) const
13081 {
13082 return ( subresource == rhs.subresource )
13083 && ( offset == rhs.offset )
13084 && ( extent == rhs.extent )
13085 && ( memory == rhs.memory )
13086 && ( memoryOffset == rhs.memoryOffset )
13087 && ( flags == rhs.flags );
13088 }
13089
13090 bool operator!=( SparseImageMemoryBind const& rhs ) const
13091 {
13092 return !operator==( rhs );
13093 }
13094
13095 ImageSubresource subresource;
13096 Offset3D offset;
13097 Extent3D extent;
13098 DeviceMemory memory;
13099 DeviceSize memoryOffset;
13100 SparseMemoryBindFlags flags;
13101 };
13102 static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
13103
13104 struct SparseBufferMemoryBindInfo
13105 {
13106 SparseBufferMemoryBindInfo( Buffer buffer_ = Buffer(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
13107 : buffer( buffer_ )
13108 , bindCount( bindCount_ )
13109 , pBinds( pBinds_ )
13110 {
13111 }
13112
13113 SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs )
13114 {
13115 memcpy( this, &rhs, sizeof(SparseBufferMemoryBindInfo) );
13116 }
13117
13118 SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs )
13119 {
13120 memcpy( this, &rhs, sizeof(SparseBufferMemoryBindInfo) );
13121 return *this;
13122 }
13123
13124 SparseBufferMemoryBindInfo& setBuffer( Buffer buffer_ )
13125 {
13126 buffer = buffer_;
13127 return *this;
13128 }
13129
13130 SparseBufferMemoryBindInfo& setBindCount( uint32_t bindCount_ )
13131 {
13132 bindCount = bindCount_;
13133 return *this;
13134 }
13135
13136 SparseBufferMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
13137 {
13138 pBinds = pBinds_;
13139 return *this;
13140 }
13141
13142 operator const VkSparseBufferMemoryBindInfo&() const
13143 {
13144 return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>(this);
13145 }
13146
13147 bool operator==( SparseBufferMemoryBindInfo const& rhs ) const
13148 {
13149 return ( buffer == rhs.buffer )
13150 && ( bindCount == rhs.bindCount )
13151 && ( pBinds == rhs.pBinds );
13152 }
13153
13154 bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const
13155 {
13156 return !operator==( rhs );
13157 }
13158
13159 Buffer buffer;
13160 uint32_t bindCount;
13161 const SparseMemoryBind* pBinds;
13162 };
13163 static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
13164
13165 struct SparseImageOpaqueMemoryBindInfo
13166 {
13167 SparseImageOpaqueMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseMemoryBind* pBinds_ = nullptr )
13168 : image( image_ )
13169 , bindCount( bindCount_ )
13170 , pBinds( pBinds_ )
13171 {
13172 }
13173
13174 SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs )
13175 {
13176 memcpy( this, &rhs, sizeof(SparseImageOpaqueMemoryBindInfo) );
13177 }
13178
13179 SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs )
13180 {
13181 memcpy( this, &rhs, sizeof(SparseImageOpaqueMemoryBindInfo) );
13182 return *this;
13183 }
13184
13185 SparseImageOpaqueMemoryBindInfo& setImage( Image image_ )
13186 {
13187 image = image_;
13188 return *this;
13189 }
13190
13191 SparseImageOpaqueMemoryBindInfo& setBindCount( uint32_t bindCount_ )
13192 {
13193 bindCount = bindCount_;
13194 return *this;
13195 }
13196
13197 SparseImageOpaqueMemoryBindInfo& setPBinds( const SparseMemoryBind* pBinds_ )
13198 {
13199 pBinds = pBinds_;
13200 return *this;
13201 }
13202
13203 operator const VkSparseImageOpaqueMemoryBindInfo&() const
13204 {
13205 return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>(this);
13206 }
13207
13208 bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const
13209 {
13210 return ( image == rhs.image )
13211 && ( bindCount == rhs.bindCount )
13212 && ( pBinds == rhs.pBinds );
13213 }
13214
13215 bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const
13216 {
13217 return !operator==( rhs );
13218 }
13219
13220 Image image;
13221 uint32_t bindCount;
13222 const SparseMemoryBind* pBinds;
13223 };
13224 static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
13225
13226 struct SparseImageMemoryBindInfo
13227 {
13228 SparseImageMemoryBindInfo( Image image_ = Image(), uint32_t bindCount_ = 0, const SparseImageMemoryBind* pBinds_ = nullptr )
13229 : image( image_ )
13230 , bindCount( bindCount_ )
13231 , pBinds( pBinds_ )
13232 {
13233 }
13234
13235 SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs )
13236 {
13237 memcpy( this, &rhs, sizeof(SparseImageMemoryBindInfo) );
13238 }
13239
13240 SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs )
13241 {
13242 memcpy( this, &rhs, sizeof(SparseImageMemoryBindInfo) );
13243 return *this;
13244 }
13245
13246 SparseImageMemoryBindInfo& setImage( Image image_ )
13247 {
13248 image = image_;
13249 return *this;
13250 }
13251
13252 SparseImageMemoryBindInfo& setBindCount( uint32_t bindCount_ )
13253 {
13254 bindCount = bindCount_;
13255 return *this;
13256 }
13257
13258 SparseImageMemoryBindInfo& setPBinds( const SparseImageMemoryBind* pBinds_ )
13259 {
13260 pBinds = pBinds_;
13261 return *this;
13262 }
13263
13264 operator const VkSparseImageMemoryBindInfo&() const
13265 {
13266 return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>(this);
13267 }
13268
13269 bool operator==( SparseImageMemoryBindInfo const& rhs ) const
13270 {
13271 return ( image == rhs.image )
13272 && ( bindCount == rhs.bindCount )
13273 && ( pBinds == rhs.pBinds );
13274 }
13275
13276 bool operator!=( SparseImageMemoryBindInfo const& rhs ) const
13277 {
13278 return !operator==( rhs );
13279 }
13280
13281 Image image;
13282 uint32_t bindCount;
13283 const SparseImageMemoryBind* pBinds;
13284 };
13285 static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
13286
13287 struct BindSparseInfo
13288 {
13289 BindSparseInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, uint32_t bufferBindCount_ = 0, const SparseBufferMemoryBindInfo* pBufferBinds_ = nullptr, uint32_t imageOpaqueBindCount_ = 0, const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = nullptr, uint32_t imageBindCount_ = 0, const SparseImageMemoryBindInfo* pImageBinds_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
13290 : sType( StructureType::eBindSparseInfo )
13291 , pNext( nullptr )
13292 , waitSemaphoreCount( waitSemaphoreCount_ )
13293 , pWaitSemaphores( pWaitSemaphores_ )
13294 , bufferBindCount( bufferBindCount_ )
13295 , pBufferBinds( pBufferBinds_ )
13296 , imageOpaqueBindCount( imageOpaqueBindCount_ )
13297 , pImageOpaqueBinds( pImageOpaqueBinds_ )
13298 , imageBindCount( imageBindCount_ )
13299 , pImageBinds( pImageBinds_ )
13300 , signalSemaphoreCount( signalSemaphoreCount_ )
13301 , pSignalSemaphores( pSignalSemaphores_ )
13302 {
13303 }
13304
13305 BindSparseInfo( VkBindSparseInfo const & rhs )
13306 {
13307 memcpy( this, &rhs, sizeof(BindSparseInfo) );
13308 }
13309
13310 BindSparseInfo& operator=( VkBindSparseInfo const & rhs )
13311 {
13312 memcpy( this, &rhs, sizeof(BindSparseInfo) );
13313 return *this;
13314 }
13315
13316 BindSparseInfo& setPNext( const void* pNext_ )
13317 {
13318 pNext = pNext_;
13319 return *this;
13320 }
13321
13322 BindSparseInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
13323 {
13324 waitSemaphoreCount = waitSemaphoreCount_;
13325 return *this;
13326 }
13327
13328 BindSparseInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
13329 {
13330 pWaitSemaphores = pWaitSemaphores_;
13331 return *this;
13332 }
13333
13334 BindSparseInfo& setBufferBindCount( uint32_t bufferBindCount_ )
13335 {
13336 bufferBindCount = bufferBindCount_;
13337 return *this;
13338 }
13339
13340 BindSparseInfo& setPBufferBinds( const SparseBufferMemoryBindInfo* pBufferBinds_ )
13341 {
13342 pBufferBinds = pBufferBinds_;
13343 return *this;
13344 }
13345
13346 BindSparseInfo& setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ )
13347 {
13348 imageOpaqueBindCount = imageOpaqueBindCount_;
13349 return *this;
13350 }
13351
13352 BindSparseInfo& setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ )
13353 {
13354 pImageOpaqueBinds = pImageOpaqueBinds_;
13355 return *this;
13356 }
13357
13358 BindSparseInfo& setImageBindCount( uint32_t imageBindCount_ )
13359 {
13360 imageBindCount = imageBindCount_;
13361 return *this;
13362 }
13363
13364 BindSparseInfo& setPImageBinds( const SparseImageMemoryBindInfo* pImageBinds_ )
13365 {
13366 pImageBinds = pImageBinds_;
13367 return *this;
13368 }
13369
13370 BindSparseInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
13371 {
13372 signalSemaphoreCount = signalSemaphoreCount_;
13373 return *this;
13374 }
13375
13376 BindSparseInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
13377 {
13378 pSignalSemaphores = pSignalSemaphores_;
13379 return *this;
13380 }
13381
13382 operator const VkBindSparseInfo&() const
13383 {
13384 return *reinterpret_cast<const VkBindSparseInfo*>(this);
13385 }
13386
13387 bool operator==( BindSparseInfo const& rhs ) const
13388 {
13389 return ( sType == rhs.sType )
13390 && ( pNext == rhs.pNext )
13391 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
13392 && ( pWaitSemaphores == rhs.pWaitSemaphores )
13393 && ( bufferBindCount == rhs.bufferBindCount )
13394 && ( pBufferBinds == rhs.pBufferBinds )
13395 && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
13396 && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
13397 && ( imageBindCount == rhs.imageBindCount )
13398 && ( pImageBinds == rhs.pImageBinds )
13399 && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
13400 && ( pSignalSemaphores == rhs.pSignalSemaphores );
13401 }
13402
13403 bool operator!=( BindSparseInfo const& rhs ) const
13404 {
13405 return !operator==( rhs );
13406 }
13407
13408 private:
13409 StructureType sType;
13410
13411 public:
13412 const void* pNext;
13413 uint32_t waitSemaphoreCount;
13414 const Semaphore* pWaitSemaphores;
13415 uint32_t bufferBindCount;
13416 const SparseBufferMemoryBindInfo* pBufferBinds;
13417 uint32_t imageOpaqueBindCount;
13418 const SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds;
13419 uint32_t imageBindCount;
13420 const SparseImageMemoryBindInfo* pImageBinds;
13421 uint32_t signalSemaphoreCount;
13422 const Semaphore* pSignalSemaphores;
13423 };
13424 static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
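  // Usage sketch (illustrative): binding one memory range to a sparse buffer and submitting the
  // bind on a queue. "sparseBuffer", "memory", "bindSize", "queue" and "fence" are assumptions,
  // and the pointed-to structs must stay alive until the call returns.
  //
  //   vk::SparseMemoryBind bind = vk::SparseMemoryBind()
  //     .setResourceOffset( 0 )
  //     .setSize( bindSize )
  //     .setMemory( memory )
  //     .setMemoryOffset( 0 );
  //   vk::SparseBufferMemoryBindInfo bufferBind( sparseBuffer, 1, &bind );
  //   vk::BindSparseInfo bindInfo = vk::BindSparseInfo()
  //     .setBufferBindCount( 1 )
  //     .setPBufferBinds( &bufferBind );
  //   queue.bindSparse( bindInfo, fence );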
13425
13426 enum class PipelineStageFlagBits
13427 {
13428 eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
13429 eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
13430 eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
13431 eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
13432 eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
13433 eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
13434 eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
13435 eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
13436 eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
13437 eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
13438 eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
13439 eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
13440 eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
13441 eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
13442 eHost = VK_PIPELINE_STAGE_HOST_BIT,
13443 eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
13444 eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
13445 eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX
13446 };
13447
13448 using PipelineStageFlags = Flags<PipelineStageFlagBits, VkPipelineStageFlags>;
13449
13450 VULKAN_HPP_INLINE PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 )
13451 {
13452 return PipelineStageFlags( bit0 ) | bit1;
13453 }
13454
13455 VULKAN_HPP_INLINE PipelineStageFlags operator~( PipelineStageFlagBits bits )
13456 {
13457 return ~( PipelineStageFlags( bits ) );
13458 }
13459
13460 template <> struct FlagTraits<PipelineStageFlagBits>
13461 {
13462 enum
13463 {
13464 allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX)
13465 };
13466 };
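  // Usage sketch (illustrative): stage masks are usually combined for barriers and submit waits.
  // "cmd" and "imageBarrier" (an ImageMemoryBarrier filled in earlier) are assumptions.
  //
  //   vk::PipelineStageFlags waitStages = vk::PipelineStageFlagBits::eColorAttachmentOutput
  //                                     | vk::PipelineStageFlagBits::eEarlyFragmentTests;
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
  //                        vk::PipelineStageFlagBits::eFragmentShader,
  //                        vk::DependencyFlags(), nullptr, nullptr, imageBarrier );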
13467
13468 enum class CommandPoolCreateFlagBits
13469 {
13470 eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
13471 eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
13472 };
13473
13474 using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
13475
13476 VULKAN_HPP_INLINE CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 )
13477 {
13478 return CommandPoolCreateFlags( bit0 ) | bit1;
13479 }
13480
13481 VULKAN_HPP_INLINE CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits )
13482 {
13483 return ~( CommandPoolCreateFlags( bits ) );
13484 }
13485
13486 template <> struct FlagTraits<CommandPoolCreateFlagBits>
13487 {
13488 enum
13489 {
13490 allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer)
13491 };
13492 };
13493
13494 struct CommandPoolCreateInfo
13495 {
13496 CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = CommandPoolCreateFlags(), uint32_t queueFamilyIndex_ = 0 )
13497 : sType( StructureType::eCommandPoolCreateInfo )
13498 , pNext( nullptr )
13499 , flags( flags_ )
13500 , queueFamilyIndex( queueFamilyIndex_ )
13501 {
13502 }
13503
13504 CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs )
13505 {
13506 memcpy( this, &rhs, sizeof(CommandPoolCreateInfo) );
13507 }
13508
13509 CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs )
13510 {
13511 memcpy( this, &rhs, sizeof(CommandPoolCreateInfo) );
13512 return *this;
13513 }
13514
13515 CommandPoolCreateInfo& setPNext( const void* pNext_ )
13516 {
13517 pNext = pNext_;
13518 return *this;
13519 }
13520
13521 CommandPoolCreateInfo& setFlags( CommandPoolCreateFlags flags_ )
13522 {
13523 flags = flags_;
13524 return *this;
13525 }
13526
13527 CommandPoolCreateInfo& setQueueFamilyIndex( uint32_t queueFamilyIndex_ )
13528 {
13529 queueFamilyIndex = queueFamilyIndex_;
13530 return *this;
13531 }
13532
13533 operator const VkCommandPoolCreateInfo&() const
13534 {
13535 return *reinterpret_cast<const VkCommandPoolCreateInfo*>(this);
13536 }
13537
13538 bool operator==( CommandPoolCreateInfo const& rhs ) const
13539 {
13540 return ( sType == rhs.sType )
13541 && ( pNext == rhs.pNext )
13542 && ( flags == rhs.flags )
13543 && ( queueFamilyIndex == rhs.queueFamilyIndex );
13544 }
13545
13546 bool operator!=( CommandPoolCreateInfo const& rhs ) const
13547 {
13548 return !operator==( rhs );
13549 }
13550
13551 private:
13552 StructureType sType;
13553
13554 public:
13555 const void* pNext;
13556 CommandPoolCreateFlags flags;
13557 uint32_t queueFamilyIndex;
13558 };
13559 static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
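  // Usage sketch (illustrative): creating a command pool whose buffers can be reset individually.
  // "device" and "graphicsQueueFamilyIndex" are assumptions for the example.
  //
  //   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
  //                                       graphicsQueueFamilyIndex );
  //   vk::CommandPool commandPool = device.createCommandPool( poolInfo );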
13560
13561 enum class CommandPoolResetFlagBits
13562 {
13563 eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
13564 };
13565
13566 using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
13567
13568 VULKAN_HPP_INLINE CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 )
13569 {
13570 return CommandPoolResetFlags( bit0 ) | bit1;
13571 }
13572
13573 VULKAN_HPP_INLINE CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits )
13574 {
13575 return ~( CommandPoolResetFlags( bits ) );
13576 }
13577
13578 template <> struct FlagTraits<CommandPoolResetFlagBits>
13579 {
13580 enum
13581 {
13582 allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
13583 };
13584 };
13585
13586 enum class CommandBufferResetFlagBits
13587 {
13588 eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
13589 };
13590
13591 using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits, VkCommandBufferResetFlags>;
13592
13593 VULKAN_HPP_INLINE CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 )
13594 {
13595 return CommandBufferResetFlags( bit0 ) | bit1;
13596 }
13597
13598 VULKAN_HPP_INLINE CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits )
13599 {
13600 return ~( CommandBufferResetFlags( bits ) );
13601 }
13602
13603 template <> struct FlagTraits<CommandBufferResetFlagBits>
13604 {
13605 enum
13606 {
13607 allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
13608 };
13609 };
13610
13611 enum class SampleCountFlagBits
13612 {
13613 e1 = VK_SAMPLE_COUNT_1_BIT,
13614 e2 = VK_SAMPLE_COUNT_2_BIT,
13615 e4 = VK_SAMPLE_COUNT_4_BIT,
13616 e8 = VK_SAMPLE_COUNT_8_BIT,
13617 e16 = VK_SAMPLE_COUNT_16_BIT,
13618 e32 = VK_SAMPLE_COUNT_32_BIT,
13619 e64 = VK_SAMPLE_COUNT_64_BIT
13620 };
13621
13622 using SampleCountFlags = Flags<SampleCountFlagBits, VkSampleCountFlags>;
13623
13624 VULKAN_HPP_INLINE SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 )
13625 {
13626 return SampleCountFlags( bit0 ) | bit1;
13627 }
13628
13629 VULKAN_HPP_INLINE SampleCountFlags operator~( SampleCountFlagBits bits )
13630 {
13631 return ~( SampleCountFlags( bits ) );
13632 }
13633
13634 template <> struct FlagTraits<SampleCountFlagBits>
13635 {
13636 enum
13637 {
13638 allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64)
13639 };
13640 };
13641
13642 struct ImageFormatProperties
13643 {
13644 operator const VkImageFormatProperties&() const
13645 {
13646 return *reinterpret_cast<const VkImageFormatProperties*>(this);
13647 }
13648
13649 bool operator==( ImageFormatProperties const& rhs ) const
13650 {
13651 return ( maxExtent == rhs.maxExtent )
13652 && ( maxMipLevels == rhs.maxMipLevels )
13653 && ( maxArrayLayers == rhs.maxArrayLayers )
13654 && ( sampleCounts == rhs.sampleCounts )
13655 && ( maxResourceSize == rhs.maxResourceSize );
13656 }
13657
13658 bool operator!=( ImageFormatProperties const& rhs ) const
13659 {
13660 return !operator==( rhs );
13661 }
13662
13663 Extent3D maxExtent;
13664 uint32_t maxMipLevels;
13665 uint32_t maxArrayLayers;
13666 SampleCountFlags sampleCounts;
13667 DeviceSize maxResourceSize;
13668 };
13669 static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
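  // Usage sketch (illustrative): checking whether a format/usage combination supports 4x MSAA.
  // "physicalDevice" is an assumption; in enhanced mode getImageFormatProperties throws if the
  // combination is not supported at all.
  //
  //   vk::ImageFormatProperties props = physicalDevice.getImageFormatProperties(
  //     vk::Format::eD32Sfloat, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
  //     vk::ImageUsageFlagBits::eDepthStencilAttachment, vk::ImageCreateFlags() );
  //   bool supports4xMsaa = !!( props.sampleCounts & vk::SampleCountFlagBits::e4 );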
13670
13671 struct ImageCreateInfo
13672 {
13673 ImageCreateInfo( ImageCreateFlags flags_ = ImageCreateFlags(), ImageType imageType_ = ImageType::e1D, Format format_ = Format::eUndefined, Extent3D extent_ = Extent3D(), uint32_t mipLevels_ = 0, uint32_t arrayLayers_ = 0, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = ImageUsageFlags(), SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, ImageLayout initialLayout_ = ImageLayout::eUndefined )
13674 : sType( StructureType::eImageCreateInfo )
13675 , pNext( nullptr )
13676 , flags( flags_ )
13677 , imageType( imageType_ )
13678 , format( format_ )
13679 , extent( extent_ )
13680 , mipLevels( mipLevels_ )
13681 , arrayLayers( arrayLayers_ )
13682 , samples( samples_ )
13683 , tiling( tiling_ )
13684 , usage( usage_ )
13685 , sharingMode( sharingMode_ )
13686 , queueFamilyIndexCount( queueFamilyIndexCount_ )
13687 , pQueueFamilyIndices( pQueueFamilyIndices_ )
13688 , initialLayout( initialLayout_ )
13689 {
13690 }
13691
13692 ImageCreateInfo( VkImageCreateInfo const & rhs )
13693 {
13694 memcpy( this, &rhs, sizeof(ImageCreateInfo) );
13695 }
13696
13697 ImageCreateInfo& operator=( VkImageCreateInfo const & rhs )
13698 {
13699 memcpy( this, &rhs, sizeof(ImageCreateInfo) );
13700 return *this;
13701 }
13702
13703 ImageCreateInfo& setPNext( const void* pNext_ )
13704 {
13705 pNext = pNext_;
13706 return *this;
13707 }
13708
13709 ImageCreateInfo& setFlags( ImageCreateFlags flags_ )
13710 {
13711 flags = flags_;
13712 return *this;
13713 }
13714
13715 ImageCreateInfo& setImageType( ImageType imageType_ )
13716 {
13717 imageType = imageType_;
13718 return *this;
13719 }
13720
13721 ImageCreateInfo& setFormat( Format format_ )
13722 {
13723 format = format_;
13724 return *this;
13725 }
13726
13727 ImageCreateInfo& setExtent( Extent3D extent_ )
13728 {
13729 extent = extent_;
13730 return *this;
13731 }
13732
13733 ImageCreateInfo& setMipLevels( uint32_t mipLevels_ )
13734 {
13735 mipLevels = mipLevels_;
13736 return *this;
13737 }
13738
13739 ImageCreateInfo& setArrayLayers( uint32_t arrayLayers_ )
13740 {
13741 arrayLayers = arrayLayers_;
13742 return *this;
13743 }
13744
13745 ImageCreateInfo& setSamples( SampleCountFlagBits samples_ )
13746 {
13747 samples = samples_;
13748 return *this;
13749 }
13750
13751 ImageCreateInfo& setTiling( ImageTiling tiling_ )
13752 {
13753 tiling = tiling_;
13754 return *this;
13755 }
13756
13757 ImageCreateInfo& setUsage( ImageUsageFlags usage_ )
13758 {
13759 usage = usage_;
13760 return *this;
13761 }
13762
13763 ImageCreateInfo& setSharingMode( SharingMode sharingMode_ )
13764 {
13765 sharingMode = sharingMode_;
13766 return *this;
13767 }
13768
13769 ImageCreateInfo& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
13770 {
13771 queueFamilyIndexCount = queueFamilyIndexCount_;
13772 return *this;
13773 }
13774
13775 ImageCreateInfo& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
13776 {
13777 pQueueFamilyIndices = pQueueFamilyIndices_;
13778 return *this;
13779 }
13780
13781 ImageCreateInfo& setInitialLayout( ImageLayout initialLayout_ )
13782 {
13783 initialLayout = initialLayout_;
13784 return *this;
13785 }
13786
13787 operator const VkImageCreateInfo&() const
13788 {
13789 return *reinterpret_cast<const VkImageCreateInfo*>(this);
13790 }
13791
13792 bool operator==( ImageCreateInfo const& rhs ) const
13793 {
13794 return ( sType == rhs.sType )
13795 && ( pNext == rhs.pNext )
13796 && ( flags == rhs.flags )
13797 && ( imageType == rhs.imageType )
13798 && ( format == rhs.format )
13799 && ( extent == rhs.extent )
13800 && ( mipLevels == rhs.mipLevels )
13801 && ( arrayLayers == rhs.arrayLayers )
13802 && ( samples == rhs.samples )
13803 && ( tiling == rhs.tiling )
13804 && ( usage == rhs.usage )
13805 && ( sharingMode == rhs.sharingMode )
13806 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
13807 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
13808 && ( initialLayout == rhs.initialLayout );
13809 }
13810
13811 bool operator!=( ImageCreateInfo const& rhs ) const
13812 {
13813 return !operator==( rhs );
13814 }
13815
13816 private:
13817 StructureType sType;
13818
13819 public:
13820 const void* pNext;
13821 ImageCreateFlags flags;
13822 ImageType imageType;
13823 Format format;
13824 Extent3D extent;
13825 uint32_t mipLevels;
13826 uint32_t arrayLayers;
13827 SampleCountFlagBits samples;
13828 ImageTiling tiling;
13829 ImageUsageFlags usage;
13830 SharingMode sharingMode;
13831 uint32_t queueFamilyIndexCount;
13832 const uint32_t* pQueueFamilyIndices;
13833 ImageLayout initialLayout;
13834 };
13835 static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
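  // Usage sketch (illustrative): a 2D sampled/transfer-destination texture with a mip chain.
  // "device", "width", "height" and "mipLevels" are assumptions; memory allocation and binding
  // are omitted.
  //
  //   vk::ImageCreateInfo imageInfo = vk::ImageCreateInfo()
  //     .setImageType( vk::ImageType::e2D )
  //     .setFormat( vk::Format::eR8G8B8A8Unorm )
  //     .setExtent( vk::Extent3D( width, height, 1 ) )
  //     .setMipLevels( mipLevels )
  //     .setArrayLayers( 1 )
  //     .setSamples( vk::SampleCountFlagBits::e1 )
  //     .setTiling( vk::ImageTiling::eOptimal )
  //     .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst )
  //     .setInitialLayout( vk::ImageLayout::eUndefined );
  //   vk::Image image = device.createImage( imageInfo );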
13836
13837 struct PipelineMultisampleStateCreateInfo
13838 {
13839 PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = PipelineMultisampleStateCreateFlags(), SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, Bool32 sampleShadingEnable_ = 0, float minSampleShading_ = 0, const SampleMask* pSampleMask_ = nullptr, Bool32 alphaToCoverageEnable_ = 0, Bool32 alphaToOneEnable_ = 0 )
13840 : sType( StructureType::ePipelineMultisampleStateCreateInfo )
13841 , pNext( nullptr )
13842 , flags( flags_ )
13843 , rasterizationSamples( rasterizationSamples_ )
13844 , sampleShadingEnable( sampleShadingEnable_ )
13845 , minSampleShading( minSampleShading_ )
13846 , pSampleMask( pSampleMask_ )
13847 , alphaToCoverageEnable( alphaToCoverageEnable_ )
13848 , alphaToOneEnable( alphaToOneEnable_ )
13849 {
13850 }
13851
13852 PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs )
13853 {
13854 memcpy( this, &rhs, sizeof(PipelineMultisampleStateCreateInfo) );
13855 }
13856
13857 PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs )
13858 {
13859 memcpy( this, &rhs, sizeof(PipelineMultisampleStateCreateInfo) );
13860 return *this;
13861 }
13862
13863 PipelineMultisampleStateCreateInfo& setPNext( const void* pNext_ )
13864 {
13865 pNext = pNext_;
13866 return *this;
13867 }
13868
13869 PipelineMultisampleStateCreateInfo& setFlags( PipelineMultisampleStateCreateFlags flags_ )
13870 {
13871 flags = flags_;
13872 return *this;
13873 }
13874
13875 PipelineMultisampleStateCreateInfo& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ )
13876 {
13877 rasterizationSamples = rasterizationSamples_;
13878 return *this;
13879 }
13880
13881 PipelineMultisampleStateCreateInfo& setSampleShadingEnable( Bool32 sampleShadingEnable_ )
13882 {
13883 sampleShadingEnable = sampleShadingEnable_;
13884 return *this;
13885 }
13886
13887 PipelineMultisampleStateCreateInfo& setMinSampleShading( float minSampleShading_ )
13888 {
13889 minSampleShading = minSampleShading_;
13890 return *this;
13891 }
13892
13893 PipelineMultisampleStateCreateInfo& setPSampleMask( const SampleMask* pSampleMask_ )
13894 {
13895 pSampleMask = pSampleMask_;
13896 return *this;
13897 }
13898
13899 PipelineMultisampleStateCreateInfo& setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ )
13900 {
13901 alphaToCoverageEnable = alphaToCoverageEnable_;
13902 return *this;
13903 }
13904
13905 PipelineMultisampleStateCreateInfo& setAlphaToOneEnable( Bool32 alphaToOneEnable_ )
13906 {
13907 alphaToOneEnable = alphaToOneEnable_;
13908 return *this;
13909 }
13910
13911 operator const VkPipelineMultisampleStateCreateInfo&() const
13912 {
13913 return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>(this);
13914 }
13915
13916 bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const
13917 {
13918 return ( sType == rhs.sType )
13919 && ( pNext == rhs.pNext )
13920 && ( flags == rhs.flags )
13921 && ( rasterizationSamples == rhs.rasterizationSamples )
13922 && ( sampleShadingEnable == rhs.sampleShadingEnable )
13923 && ( minSampleShading == rhs.minSampleShading )
13924 && ( pSampleMask == rhs.pSampleMask )
13925 && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
13926 && ( alphaToOneEnable == rhs.alphaToOneEnable );
13927 }
13928
13929 bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const
13930 {
13931 return !operator==( rhs );
13932 }
13933
13934 private:
13935 StructureType sType;
13936
13937 public:
13938 const void* pNext;
13939 PipelineMultisampleStateCreateFlags flags;
13940 SampleCountFlagBits rasterizationSamples;
13941 Bool32 sampleShadingEnable;
13942 float minSampleShading;
13943 const SampleMask* pSampleMask;
13944 Bool32 alphaToCoverageEnable;
13945 Bool32 alphaToOneEnable;
13946 };
13947 static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
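  // Usage sketch (illustrative): a no-MSAA multisample state, the common default for a graphics
  // pipeline; the remaining members keep their zero/null defaults from the constructor above.
  //
  //   vk::PipelineMultisampleStateCreateInfo multisampleState = vk::PipelineMultisampleStateCreateInfo()
  //     .setRasterizationSamples( vk::SampleCountFlagBits::e1 )
  //     .setSampleShadingEnable( VK_FALSE );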
13948
13949 struct GraphicsPipelineCreateInfo
13950 {
13951 GraphicsPipelineCreateInfo( PipelineCreateFlags flags_ = PipelineCreateFlags(), uint32_t stageCount_ = 0, const PipelineShaderStageCreateInfo* pStages_ = nullptr, const PipelineVertexInputStateCreateInfo* pVertexInputState_ = nullptr, const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = nullptr, const PipelineTessellationStateCreateInfo* pTessellationState_ = nullptr, const PipelineViewportStateCreateInfo* pViewportState_ = nullptr, const PipelineRasterizationStateCreateInfo* pRasterizationState_ = nullptr, const PipelineMultisampleStateCreateInfo* pMultisampleState_ = nullptr, const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = nullptr, const PipelineColorBlendStateCreateInfo* pColorBlendState_ = nullptr, const PipelineDynamicStateCreateInfo* pDynamicState_ = nullptr, PipelineLayout layout_ = PipelineLayout(), RenderPass renderPass_ = RenderPass(), uint32_t subpass_ = 0, Pipeline basePipelineHandle_ = Pipeline(), int32_t basePipelineIndex_ = 0 )
13952 : sType( StructureType::eGraphicsPipelineCreateInfo )
13953 , pNext( nullptr )
13954 , flags( flags_ )
13955 , stageCount( stageCount_ )
13956 , pStages( pStages_ )
13957 , pVertexInputState( pVertexInputState_ )
13958 , pInputAssemblyState( pInputAssemblyState_ )
13959 , pTessellationState( pTessellationState_ )
13960 , pViewportState( pViewportState_ )
13961 , pRasterizationState( pRasterizationState_ )
13962 , pMultisampleState( pMultisampleState_ )
13963 , pDepthStencilState( pDepthStencilState_ )
13964 , pColorBlendState( pColorBlendState_ )
13965 , pDynamicState( pDynamicState_ )
13966 , layout( layout_ )
13967 , renderPass( renderPass_ )
13968 , subpass( subpass_ )
13969 , basePipelineHandle( basePipelineHandle_ )
13970 , basePipelineIndex( basePipelineIndex_ )
13971 {
13972 }
13973
13974 GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs )
13975 {
13976 memcpy( this, &rhs, sizeof(GraphicsPipelineCreateInfo) );
13977 }
13978
13979 GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs )
13980 {
13981 memcpy( this, &rhs, sizeof(GraphicsPipelineCreateInfo) );
13982 return *this;
13983 }
13984
13985 GraphicsPipelineCreateInfo& setPNext( const void* pNext_ )
13986 {
13987 pNext = pNext_;
13988 return *this;
13989 }
13990
13991 GraphicsPipelineCreateInfo& setFlags( PipelineCreateFlags flags_ )
13992 {
13993 flags = flags_;
13994 return *this;
13995 }
13996
13997 GraphicsPipelineCreateInfo& setStageCount( uint32_t stageCount_ )
13998 {
13999 stageCount = stageCount_;
14000 return *this;
14001 }
14002
14003 GraphicsPipelineCreateInfo& setPStages( const PipelineShaderStageCreateInfo* pStages_ )
14004 {
14005 pStages = pStages_;
14006 return *this;
14007 }
14008
14009 GraphicsPipelineCreateInfo& setPVertexInputState( const PipelineVertexInputStateCreateInfo* pVertexInputState_ )
14010 {
14011 pVertexInputState = pVertexInputState_;
14012 return *this;
14013 }
14014
14015 GraphicsPipelineCreateInfo& setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ )
14016 {
14017 pInputAssemblyState = pInputAssemblyState_;
14018 return *this;
14019 }
14020
14021 GraphicsPipelineCreateInfo& setPTessellationState( const PipelineTessellationStateCreateInfo* pTessellationState_ )
14022 {
14023 pTessellationState = pTessellationState_;
14024 return *this;
14025 }
14026
14027 GraphicsPipelineCreateInfo& setPViewportState( const PipelineViewportStateCreateInfo* pViewportState_ )
14028 {
14029 pViewportState = pViewportState_;
14030 return *this;
14031 }
14032
14033 GraphicsPipelineCreateInfo& setPRasterizationState( const PipelineRasterizationStateCreateInfo* pRasterizationState_ )
14034 {
14035 pRasterizationState = pRasterizationState_;
14036 return *this;
14037 }
14038
14039 GraphicsPipelineCreateInfo& setPMultisampleState( const PipelineMultisampleStateCreateInfo* pMultisampleState_ )
14040 {
14041 pMultisampleState = pMultisampleState_;
14042 return *this;
14043 }
14044
14045 GraphicsPipelineCreateInfo& setPDepthStencilState( const PipelineDepthStencilStateCreateInfo* pDepthStencilState_ )
14046 {
14047 pDepthStencilState = pDepthStencilState_;
14048 return *this;
14049 }
14050
14051 GraphicsPipelineCreateInfo& setPColorBlendState( const PipelineColorBlendStateCreateInfo* pColorBlendState_ )
14052 {
14053 pColorBlendState = pColorBlendState_;
14054 return *this;
14055 }
14056
14057 GraphicsPipelineCreateInfo& setPDynamicState( const PipelineDynamicStateCreateInfo* pDynamicState_ )
14058 {
14059 pDynamicState = pDynamicState_;
14060 return *this;
14061 }
14062
14063 GraphicsPipelineCreateInfo& setLayout( PipelineLayout layout_ )
14064 {
14065 layout = layout_;
14066 return *this;
14067 }
14068
14069 GraphicsPipelineCreateInfo& setRenderPass( RenderPass renderPass_ )
14070 {
14071 renderPass = renderPass_;
14072 return *this;
14073 }
14074
14075 GraphicsPipelineCreateInfo& setSubpass( uint32_t subpass_ )
14076 {
14077 subpass = subpass_;
14078 return *this;
14079 }
14080
14081 GraphicsPipelineCreateInfo& setBasePipelineHandle( Pipeline basePipelineHandle_ )
14082 {
14083 basePipelineHandle = basePipelineHandle_;
14084 return *this;
14085 }
14086
14087 GraphicsPipelineCreateInfo& setBasePipelineIndex( int32_t basePipelineIndex_ )
14088 {
14089 basePipelineIndex = basePipelineIndex_;
14090 return *this;
14091 }
14092
14093 operator const VkGraphicsPipelineCreateInfo&() const
14094 {
14095 return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>(this);
14096 }
14097
14098 bool operator==( GraphicsPipelineCreateInfo const& rhs ) const
14099 {
14100 return ( sType == rhs.sType )
14101 && ( pNext == rhs.pNext )
14102 && ( flags == rhs.flags )
14103 && ( stageCount == rhs.stageCount )
14104 && ( pStages == rhs.pStages )
14105 && ( pVertexInputState == rhs.pVertexInputState )
14106 && ( pInputAssemblyState == rhs.pInputAssemblyState )
14107 && ( pTessellationState == rhs.pTessellationState )
14108 && ( pViewportState == rhs.pViewportState )
14109 && ( pRasterizationState == rhs.pRasterizationState )
14110 && ( pMultisampleState == rhs.pMultisampleState )
14111 && ( pDepthStencilState == rhs.pDepthStencilState )
14112 && ( pColorBlendState == rhs.pColorBlendState )
14113 && ( pDynamicState == rhs.pDynamicState )
14114 && ( layout == rhs.layout )
14115 && ( renderPass == rhs.renderPass )
14116 && ( subpass == rhs.subpass )
14117 && ( basePipelineHandle == rhs.basePipelineHandle )
14118 && ( basePipelineIndex == rhs.basePipelineIndex );
14119 }
14120
14121 bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const
14122 {
14123 return !operator==( rhs );
14124 }
14125
14126 private:
14127 StructureType sType;
14128
14129 public:
14130 const void* pNext;
14131 PipelineCreateFlags flags;
14132 uint32_t stageCount;
14133 const PipelineShaderStageCreateInfo* pStages;
14134 const PipelineVertexInputStateCreateInfo* pVertexInputState;
14135 const PipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
14136 const PipelineTessellationStateCreateInfo* pTessellationState;
14137 const PipelineViewportStateCreateInfo* pViewportState;
14138 const PipelineRasterizationStateCreateInfo* pRasterizationState;
14139 const PipelineMultisampleStateCreateInfo* pMultisampleState;
14140 const PipelineDepthStencilStateCreateInfo* pDepthStencilState;
14141 const PipelineColorBlendStateCreateInfo* pColorBlendState;
14142 const PipelineDynamicStateCreateInfo* pDynamicState;
14143 PipelineLayout layout;
14144 RenderPass renderPass;
14145 uint32_t subpass;
14146 Pipeline basePipelineHandle;
14147 int32_t basePipelineIndex;
14148 };
14149 static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
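
  // Usage sketch (illustrative comment only, not part of the generated header): the fluent
  // set*() members above let the create info be built in one expression and passed to the
  // enhanced-mode Device::createGraphicsPipeline wrapper. The handles and state structs named
  // here ("device", "pipelineLayout", "renderPass", "shaderStages", the p*State locals) are
  // placeholders assumed to have been created by the application beforehand.
  //
  //   vk::Pipeline pipeline = device.createGraphicsPipeline(
  //     vk::PipelineCache(),                                    // no pipeline cache
  //     vk::GraphicsPipelineCreateInfo()
  //       .setStageCount( static_cast<uint32_t>( shaderStages.size() ) )
  //       .setPStages( shaderStages.data() )
  //       .setPVertexInputState( &vertexInputState )
  //       .setPInputAssemblyState( &inputAssemblyState )
  //       .setPViewportState( &viewportState )
  //       .setPRasterizationState( &rasterizationState )
  //       .setPMultisampleState( &multisampleState )
  //       .setPColorBlendState( &colorBlendState )
  //       .setLayout( pipelineLayout )
  //       .setRenderPass( renderPass )
  //       .setSubpass( 0 ) );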
14150
14151 struct PhysicalDeviceLimits
14152 {
14153 operator const VkPhysicalDeviceLimits&() const
14154 {
14155 return *reinterpret_cast<const VkPhysicalDeviceLimits*>(this);
14156 }
14157
14158 bool operator==( PhysicalDeviceLimits const& rhs ) const
14159 {
14160 return ( maxImageDimension1D == rhs.maxImageDimension1D )
14161 && ( maxImageDimension2D == rhs.maxImageDimension2D )
14162 && ( maxImageDimension3D == rhs.maxImageDimension3D )
14163 && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
14164 && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
14165 && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
14166 && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
14167 && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
14168 && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
14169 && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
14170 && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
14171 && ( bufferImageGranularity == rhs.bufferImageGranularity )
14172 && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
14173 && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
14174 && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
14175 && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
14176 && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
14177 && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
14178 && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
14179 && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
14180 && ( maxPerStageResources == rhs.maxPerStageResources )
14181 && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
14182 && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
14183 && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
14184 && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
14185 && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
14186 && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
14187 && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
14188 && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
14189 && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
14190 && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
14191 && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
14192 && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
14193 && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
14194 && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
14195 && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
14196 && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
14197 && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
14198 && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
14199 && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
14200 && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
14201 && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
14202 && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
14203 && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
14204 && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
14205 && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
14206 && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
14207 && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
14208 && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
14209 && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
14210 && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
14211 && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
14212 && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
14213 && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
14214 && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
14215 && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
14216 && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
14217 && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
14218 && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
14219 && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
14220 && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
14221 && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
14222 && ( maxViewports == rhs.maxViewports )
14223 && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
14224 && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
14225 && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
14226 && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
14227 && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
14228 && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
14229 && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
14230 && ( minTexelOffset == rhs.minTexelOffset )
14231 && ( maxTexelOffset == rhs.maxTexelOffset )
14232 && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
14233 && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
14234 && ( minInterpolationOffset == rhs.minInterpolationOffset )
14235 && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
14236 && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
14237 && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
14238 && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
14239 && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
14240 && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
14241 && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
14242 && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
14243 && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
14244 && ( maxColorAttachments == rhs.maxColorAttachments )
14245 && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
14246 && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
14247 && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
14248 && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
14249 && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
14250 && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
14251 && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
14252 && ( timestampPeriod == rhs.timestampPeriod )
14253 && ( maxClipDistances == rhs.maxClipDistances )
14254 && ( maxCullDistances == rhs.maxCullDistances )
14255 && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
14256 && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
14257 && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
14258 && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
14259 && ( pointSizeGranularity == rhs.pointSizeGranularity )
14260 && ( lineWidthGranularity == rhs.lineWidthGranularity )
14261 && ( strictLines == rhs.strictLines )
14262 && ( standardSampleLocations == rhs.standardSampleLocations )
14263 && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
14264 && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
14265 && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
14266 }
14267
14268 bool operator!=( PhysicalDeviceLimits const& rhs ) const
14269 {
14270 return !operator==( rhs );
14271 }
14272
14273 uint32_t maxImageDimension1D;
14274 uint32_t maxImageDimension2D;
14275 uint32_t maxImageDimension3D;
14276 uint32_t maxImageDimensionCube;
14277 uint32_t maxImageArrayLayers;
14278 uint32_t maxTexelBufferElements;
14279 uint32_t maxUniformBufferRange;
14280 uint32_t maxStorageBufferRange;
14281 uint32_t maxPushConstantsSize;
14282 uint32_t maxMemoryAllocationCount;
14283 uint32_t maxSamplerAllocationCount;
14284 DeviceSize bufferImageGranularity;
14285 DeviceSize sparseAddressSpaceSize;
14286 uint32_t maxBoundDescriptorSets;
14287 uint32_t maxPerStageDescriptorSamplers;
14288 uint32_t maxPerStageDescriptorUniformBuffers;
14289 uint32_t maxPerStageDescriptorStorageBuffers;
14290 uint32_t maxPerStageDescriptorSampledImages;
14291 uint32_t maxPerStageDescriptorStorageImages;
14292 uint32_t maxPerStageDescriptorInputAttachments;
14293 uint32_t maxPerStageResources;
14294 uint32_t maxDescriptorSetSamplers;
14295 uint32_t maxDescriptorSetUniformBuffers;
14296 uint32_t maxDescriptorSetUniformBuffersDynamic;
14297 uint32_t maxDescriptorSetStorageBuffers;
14298 uint32_t maxDescriptorSetStorageBuffersDynamic;
14299 uint32_t maxDescriptorSetSampledImages;
14300 uint32_t maxDescriptorSetStorageImages;
14301 uint32_t maxDescriptorSetInputAttachments;
14302 uint32_t maxVertexInputAttributes;
14303 uint32_t maxVertexInputBindings;
14304 uint32_t maxVertexInputAttributeOffset;
14305 uint32_t maxVertexInputBindingStride;
14306 uint32_t maxVertexOutputComponents;
14307 uint32_t maxTessellationGenerationLevel;
14308 uint32_t maxTessellationPatchSize;
14309 uint32_t maxTessellationControlPerVertexInputComponents;
14310 uint32_t maxTessellationControlPerVertexOutputComponents;
14311 uint32_t maxTessellationControlPerPatchOutputComponents;
14312 uint32_t maxTessellationControlTotalOutputComponents;
14313 uint32_t maxTessellationEvaluationInputComponents;
14314 uint32_t maxTessellationEvaluationOutputComponents;
14315 uint32_t maxGeometryShaderInvocations;
14316 uint32_t maxGeometryInputComponents;
14317 uint32_t maxGeometryOutputComponents;
14318 uint32_t maxGeometryOutputVertices;
14319 uint32_t maxGeometryTotalOutputComponents;
14320 uint32_t maxFragmentInputComponents;
14321 uint32_t maxFragmentOutputAttachments;
14322 uint32_t maxFragmentDualSrcAttachments;
14323 uint32_t maxFragmentCombinedOutputResources;
14324 uint32_t maxComputeSharedMemorySize;
14325 uint32_t maxComputeWorkGroupCount[3];
14326 uint32_t maxComputeWorkGroupInvocations;
14327 uint32_t maxComputeWorkGroupSize[3];
14328 uint32_t subPixelPrecisionBits;
14329 uint32_t subTexelPrecisionBits;
14330 uint32_t mipmapPrecisionBits;
14331 uint32_t maxDrawIndexedIndexValue;
14332 uint32_t maxDrawIndirectCount;
14333 float maxSamplerLodBias;
14334 float maxSamplerAnisotropy;
14335 uint32_t maxViewports;
14336 uint32_t maxViewportDimensions[2];
14337 float viewportBoundsRange[2];
14338 uint32_t viewportSubPixelBits;
14339 size_t minMemoryMapAlignment;
14340 DeviceSize minTexelBufferOffsetAlignment;
14341 DeviceSize minUniformBufferOffsetAlignment;
14342 DeviceSize minStorageBufferOffsetAlignment;
14343 int32_t minTexelOffset;
14344 uint32_t maxTexelOffset;
14345 int32_t minTexelGatherOffset;
14346 uint32_t maxTexelGatherOffset;
14347 float minInterpolationOffset;
14348 float maxInterpolationOffset;
14349 uint32_t subPixelInterpolationOffsetBits;
14350 uint32_t maxFramebufferWidth;
14351 uint32_t maxFramebufferHeight;
14352 uint32_t maxFramebufferLayers;
14353 SampleCountFlags framebufferColorSampleCounts;
14354 SampleCountFlags framebufferDepthSampleCounts;
14355 SampleCountFlags framebufferStencilSampleCounts;
14356 SampleCountFlags framebufferNoAttachmentsSampleCounts;
14357 uint32_t maxColorAttachments;
14358 SampleCountFlags sampledImageColorSampleCounts;
14359 SampleCountFlags sampledImageIntegerSampleCounts;
14360 SampleCountFlags sampledImageDepthSampleCounts;
14361 SampleCountFlags sampledImageStencilSampleCounts;
14362 SampleCountFlags storageImageSampleCounts;
14363 uint32_t maxSampleMaskWords;
14364 Bool32 timestampComputeAndGraphics;
14365 float timestampPeriod;
14366 uint32_t maxClipDistances;
14367 uint32_t maxCullDistances;
14368 uint32_t maxCombinedClipAndCullDistances;
14369 uint32_t discreteQueuePriorities;
14370 float pointSizeRange[2];
14371 float lineWidthRange[2];
14372 float pointSizeGranularity;
14373 float lineWidthGranularity;
14374 Bool32 strictLines;
14375 Bool32 standardSampleLocations;
14376 DeviceSize optimalBufferCopyOffsetAlignment;
14377 DeviceSize optimalBufferCopyRowPitchAlignment;
14378 DeviceSize nonCoherentAtomSize;
14379 };
14380 static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
14381
14382 struct PhysicalDeviceProperties
14383 {
14384 operator const VkPhysicalDeviceProperties&() const
14385 {
14386 return *reinterpret_cast<const VkPhysicalDeviceProperties*>(this);
14387 }
14388
14389 bool operator==( PhysicalDeviceProperties const& rhs ) const
14390 {
14391 return ( apiVersion == rhs.apiVersion )
14392 && ( driverVersion == rhs.driverVersion )
14393 && ( vendorID == rhs.vendorID )
14394 && ( deviceID == rhs.deviceID )
14395 && ( deviceType == rhs.deviceType )
14396 && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
14397 && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
14398 && ( limits == rhs.limits )
14399 && ( sparseProperties == rhs.sparseProperties );
14400 }
14401
14402 bool operator!=( PhysicalDeviceProperties const& rhs ) const
14403 {
14404 return !operator==( rhs );
14405 }
14406
14407 uint32_t apiVersion;
14408 uint32_t driverVersion;
14409 uint32_t vendorID;
14410 uint32_t deviceID;
14411 PhysicalDeviceType deviceType;
14412 char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
14413 uint8_t pipelineCacheUUID[VK_UUID_SIZE];
14414 PhysicalDeviceLimits limits;
14415 PhysicalDeviceSparseProperties sparseProperties;
14416 };
14417 static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
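
  // Usage sketch (illustrative comment only): PhysicalDeviceProperties is a pure output
  // structure, so it has no setters; it is typically obtained from an enumerated
  // vk::PhysicalDevice and inspected, for example to validate a required limit.
  // "physicalDevice" is assumed to come from Instance::enumeratePhysicalDevices().
  //
  //   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
  //   if ( props.limits.maxImageDimension2D < 4096 )
  //   {
  //     // fall back to smaller render targets on this device
  //   }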
14418
14419 struct PhysicalDeviceProperties2KHR
14420 {
14421 operator const VkPhysicalDeviceProperties2KHR&() const
14422 {
14423 return *reinterpret_cast<const VkPhysicalDeviceProperties2KHR*>(this);
14424 }
14425
14426 bool operator==( PhysicalDeviceProperties2KHR const& rhs ) const
14427 {
14428 return ( sType == rhs.sType )
14429 && ( pNext == rhs.pNext )
14430 && ( properties == rhs.properties );
14431 }
14432
14433 bool operator!=( PhysicalDeviceProperties2KHR const& rhs ) const
14434 {
14435 return !operator==( rhs );
14436 }
14437
14438 private:
14439 StructureType sType;
14440
14441 public:
14442 void* pNext;
14443 PhysicalDeviceProperties properties;
14444 };
14445 static_assert( sizeof( PhysicalDeviceProperties2KHR ) == sizeof( VkPhysicalDeviceProperties2KHR ), "struct and wrapper have different size!" );
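
  // Usage sketch (illustrative comment only, assuming the VK_KHR_get_physical_device_properties2
  // instance extension and its generated wrapper are available): the 2KHR variant exists so that
  // extension structures can be chained through pNext while the core data is returned in
  // "properties".
  //
  //   vk::PhysicalDeviceProperties2KHR props2 = physicalDevice.getProperties2KHR();
  //   uint32_t apiVersion = props2.properties.apiVersion;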
14446
14447 struct ImageFormatProperties2KHR
14448 {
14449 operator const VkImageFormatProperties2KHR&() const
14450 {
14451 return *reinterpret_cast<const VkImageFormatProperties2KHR*>(this);
14452 }
14453
14454 bool operator==( ImageFormatProperties2KHR const& rhs ) const
14455 {
14456 return ( sType == rhs.sType )
14457 && ( pNext == rhs.pNext )
14458 && ( imageFormatProperties == rhs.imageFormatProperties );
14459 }
14460
14461 bool operator!=( ImageFormatProperties2KHR const& rhs ) const
14462 {
14463 return !operator==( rhs );
14464 }
14465
14466 private:
14467 StructureType sType;
14468
14469 public:
14470 void* pNext;
14471 ImageFormatProperties imageFormatProperties;
14472 };
14473 static_assert( sizeof( ImageFormatProperties2KHR ) == sizeof( VkImageFormatProperties2KHR ), "struct and wrapper have different size!" );
14474
14475 struct PhysicalDeviceSparseImageFormatInfo2KHR
14476 {
14477 PhysicalDeviceSparseImageFormatInfo2KHR( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageUsageFlags usage_ = ImageUsageFlags(), ImageTiling tiling_ = ImageTiling::eOptimal )
14478 : sType( StructureType::ePhysicalDeviceSparseImageFormatInfo2KHR )
14479 , pNext( nullptr )
14480 , format( format_ )
14481 , type( type_ )
14482 , samples( samples_ )
14483 , usage( usage_ )
14484 , tiling( tiling_ )
14485 {
14486 }
14487
14488 PhysicalDeviceSparseImageFormatInfo2KHR( VkPhysicalDeviceSparseImageFormatInfo2KHR const & rhs )
14489 {
14490 memcpy( this, &rhs, sizeof(PhysicalDeviceSparseImageFormatInfo2KHR) );
14491 }
14492
14493 PhysicalDeviceSparseImageFormatInfo2KHR& operator=( VkPhysicalDeviceSparseImageFormatInfo2KHR const & rhs )
14494 {
14495 memcpy( this, &rhs, sizeof(PhysicalDeviceSparseImageFormatInfo2KHR) );
14496 return *this;
14497 }
14498
14499 PhysicalDeviceSparseImageFormatInfo2KHR& setPNext( const void* pNext_ )
14500 {
14501 pNext = pNext_;
14502 return *this;
14503 }
14504
14505 PhysicalDeviceSparseImageFormatInfo2KHR& setFormat( Format format_ )
14506 {
14507 format = format_;
14508 return *this;
14509 }
14510
14511 PhysicalDeviceSparseImageFormatInfo2KHR& setType( ImageType type_ )
14512 {
14513 type = type_;
14514 return *this;
14515 }
14516
14517 PhysicalDeviceSparseImageFormatInfo2KHR& setSamples( SampleCountFlagBits samples_ )
14518 {
14519 samples = samples_;
14520 return *this;
14521 }
14522
14523 PhysicalDeviceSparseImageFormatInfo2KHR& setUsage( ImageUsageFlags usage_ )
14524 {
14525 usage = usage_;
14526 return *this;
14527 }
14528
14529 PhysicalDeviceSparseImageFormatInfo2KHR& setTiling( ImageTiling tiling_ )
14530 {
14531 tiling = tiling_;
14532 return *this;
14533 }
14534
14535 operator const VkPhysicalDeviceSparseImageFormatInfo2KHR&() const
14536 {
14537 return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>(this);
14538 }
14539
14540 bool operator==( PhysicalDeviceSparseImageFormatInfo2KHR const& rhs ) const
14541 {
14542 return ( sType == rhs.sType )
14543 && ( pNext == rhs.pNext )
14544 && ( format == rhs.format )
14545 && ( type == rhs.type )
14546 && ( samples == rhs.samples )
14547 && ( usage == rhs.usage )
14548 && ( tiling == rhs.tiling );
14549 }
14550
14551 bool operator!=( PhysicalDeviceSparseImageFormatInfo2KHR const& rhs ) const
14552 {
14553 return !operator==( rhs );
14554 }
14555
14556 private:
14557 StructureType sType;
14558
14559 public:
14560 const void* pNext;
14561 Format format;
14562 ImageType type;
14563 SampleCountFlagBits samples;
14564 ImageUsageFlags usage;
14565 ImageTiling tiling;
14566 };
14567 static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2KHR ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2KHR ), "struct and wrapper have different size!" );
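
  // Usage sketch (illustrative comment only, assuming VK_KHR_get_physical_device_properties2 is
  // enabled and the corresponding sparse-image query wrapper is present): this input structure is
  // filled with the fluent setters and then handed to the query.
  //
  //   vk::PhysicalDeviceSparseImageFormatInfo2KHR formatInfo = vk::PhysicalDeviceSparseImageFormatInfo2KHR()
  //     .setFormat( vk::Format::eR8G8B8A8Unorm )
  //     .setType( vk::ImageType::e2D )
  //     .setSamples( vk::SampleCountFlagBits::e1 )
  //     .setUsage( vk::ImageUsageFlagBits::eSampled )
  //     .setTiling( vk::ImageTiling::eOptimal );
  //   std::vector<vk::SparseImageFormatProperties2KHR> sparseProps =
  //     physicalDevice.getSparseImageFormatProperties2KHR( formatInfo );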
14568
14569 enum class AttachmentDescriptionFlagBits
14570 {
14571 eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
14572 };
14573
14574 using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits, VkAttachmentDescriptionFlags>;
14575
14576 VULKAN_HPP_INLINE AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 )
14577 {
14578 return AttachmentDescriptionFlags( bit0 ) | bit1;
14579 }
14580
14581 VULKAN_HPP_INLINE AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits )
14582 {
14583 return ~( AttachmentDescriptionFlags( bits ) );
14584 }
14585
14586 template <> struct FlagTraits<AttachmentDescriptionFlagBits>
14587 {
14588 enum
14589 {
14590 allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
14591 };
14592 };
14593
14594 struct AttachmentDescription
14595 {
14596 AttachmentDescription( AttachmentDescriptionFlags flags_ = AttachmentDescriptionFlags(), Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined )
14597 : flags( flags_ )
14598 , format( format_ )
14599 , samples( samples_ )
14600 , loadOp( loadOp_ )
14601 , storeOp( storeOp_ )
14602 , stencilLoadOp( stencilLoadOp_ )
14603 , stencilStoreOp( stencilStoreOp_ )
14604 , initialLayout( initialLayout_ )
14605 , finalLayout( finalLayout_ )
14606 {
14607 }
14608
14609 AttachmentDescription( VkAttachmentDescription const & rhs )
14610 {
14611 memcpy( this, &rhs, sizeof(AttachmentDescription) );
14612 }
14613
14614 AttachmentDescription& operator=( VkAttachmentDescription const & rhs )
14615 {
14616 memcpy( this, &rhs, sizeof(AttachmentDescription) );
14617 return *this;
14618 }
14619
14620 AttachmentDescription& setFlags( AttachmentDescriptionFlags flags_ )
14621 {
14622 flags = flags_;
14623 return *this;
14624 }
14625
14626 AttachmentDescription& setFormat( Format format_ )
14627 {
14628 format = format_;
14629 return *this;
14630 }
14631
14632 AttachmentDescription& setSamples( SampleCountFlagBits samples_ )
14633 {
14634 samples = samples_;
14635 return *this;
14636 }
14637
14638 AttachmentDescription& setLoadOp( AttachmentLoadOp loadOp_ )
14639 {
14640 loadOp = loadOp_;
14641 return *this;
14642 }
14643
14644 AttachmentDescription& setStoreOp( AttachmentStoreOp storeOp_ )
14645 {
14646 storeOp = storeOp_;
14647 return *this;
14648 }
14649
14650 AttachmentDescription& setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ )
14651 {
14652 stencilLoadOp = stencilLoadOp_;
14653 return *this;
14654 }
14655
14656 AttachmentDescription& setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ )
14657 {
14658 stencilStoreOp = stencilStoreOp_;
14659 return *this;
14660 }
14661
14662 AttachmentDescription& setInitialLayout( ImageLayout initialLayout_ )
14663 {
14664 initialLayout = initialLayout_;
14665 return *this;
14666 }
14667
14668 AttachmentDescription& setFinalLayout( ImageLayout finalLayout_ )
14669 {
14670 finalLayout = finalLayout_;
14671 return *this;
14672 }
14673
14674 operator const VkAttachmentDescription&() const
14675 {
14676 return *reinterpret_cast<const VkAttachmentDescription*>(this);
14677 }
14678
14679 bool operator==( AttachmentDescription const& rhs ) const
14680 {
14681 return ( flags == rhs.flags )
14682 && ( format == rhs.format )
14683 && ( samples == rhs.samples )
14684 && ( loadOp == rhs.loadOp )
14685 && ( storeOp == rhs.storeOp )
14686 && ( stencilLoadOp == rhs.stencilLoadOp )
14687 && ( stencilStoreOp == rhs.stencilStoreOp )
14688 && ( initialLayout == rhs.initialLayout )
14689 && ( finalLayout == rhs.finalLayout );
14690 }
14691
14692 bool operator!=( AttachmentDescription const& rhs ) const
14693 {
14694 return !operator==( rhs );
14695 }
14696
14697 AttachmentDescriptionFlags flags;
14698 Format format;
14699 SampleCountFlagBits samples;
14700 AttachmentLoadOp loadOp;
14701 AttachmentStoreOp storeOp;
14702 AttachmentLoadOp stencilLoadOp;
14703 AttachmentStoreOp stencilStoreOp;
14704 ImageLayout initialLayout;
14705 ImageLayout finalLayout;
14706 };
14707 static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
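
  // Usage sketch (illustrative comment only): a typical single-sample color attachment that is
  // cleared on load and kept for presentation. In practice the format would come from the
  // surface format chosen for the swapchain.
  //
  //   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription()
  //     .setFormat( vk::Format::eB8G8R8A8Unorm )
  //     .setSamples( vk::SampleCountFlagBits::e1 )
  //     .setLoadOp( vk::AttachmentLoadOp::eClear )
  //     .setStoreOp( vk::AttachmentStoreOp::eStore )
  //     .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
  //     .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
  //     .setInitialLayout( vk::ImageLayout::eUndefined )
  //     .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );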
14708
14709 enum class StencilFaceFlagBits
14710 {
14711 eFront = VK_STENCIL_FACE_FRONT_BIT,
14712 eBack = VK_STENCIL_FACE_BACK_BIT,
14713 eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
14714 };
14715
14716 using StencilFaceFlags = Flags<StencilFaceFlagBits, VkStencilFaceFlags>;
14717
14718 VULKAN_HPP_INLINE StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 )
14719 {
14720 return StencilFaceFlags( bit0 ) | bit1;
14721 }
14722
14723 VULKAN_HPP_INLINE StencilFaceFlags operator~( StencilFaceFlagBits bits )
14724 {
14725 return ~( StencilFaceFlags( bits ) );
14726 }
14727
14728 template <> struct FlagTraits<StencilFaceFlagBits>
14729 {
14730 enum
14731 {
14732 allFlags = VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eVkStencilFrontAndBack)
14733 };
14734 };
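
  // Usage sketch (illustrative comment only): the operator| overload above combines individual
  // face bits into a StencilFaceFlags mask, e.g. for the dynamic stencil commands
  // ("commandBuffer" is assumed to be a vk::CommandBuffer in the recording state).
  //
  //   vk::StencilFaceFlags bothFaces = vk::StencilFaceFlagBits::eFront | vk::StencilFaceFlagBits::eBack;
  //   commandBuffer.setStencilReference( bothFaces, 0x1 );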
14735
14736 enum class DescriptorPoolCreateFlagBits
14737 {
14738 eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
14739 };
14740
14741 using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
14742
14743 VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 )
14744 {
14745 return DescriptorPoolCreateFlags( bit0 ) | bit1;
14746 }
14747
14748 VULKAN_HPP_INLINE DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits )
14749 {
14750 return ~( DescriptorPoolCreateFlags( bits ) );
14751 }
14752
14753 template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
14754 {
14755 enum
14756 {
14757 allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet)
14758 };
14759 };
14760
14761 struct DescriptorPoolCreateInfo
14762 {
14763 DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = DescriptorPoolCreateFlags(), uint32_t maxSets_ = 0, uint32_t poolSizeCount_ = 0, const DescriptorPoolSize* pPoolSizes_ = nullptr )
14764 : sType( StructureType::eDescriptorPoolCreateInfo )
14765 , pNext( nullptr )
14766 , flags( flags_ )
14767 , maxSets( maxSets_ )
14768 , poolSizeCount( poolSizeCount_ )
14769 , pPoolSizes( pPoolSizes_ )
14770 {
14771 }
14772
14773 DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs )
14774 {
14775 memcpy( this, &rhs, sizeof(DescriptorPoolCreateInfo) );
14776 }
14777
14778 DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs )
14779 {
14780 memcpy( this, &rhs, sizeof(DescriptorPoolCreateInfo) );
14781 return *this;
14782 }
14783
14784 DescriptorPoolCreateInfo& setPNext( const void* pNext_ )
14785 {
14786 pNext = pNext_;
14787 return *this;
14788 }
14789
14790 DescriptorPoolCreateInfo& setFlags( DescriptorPoolCreateFlags flags_ )
14791 {
14792 flags = flags_;
14793 return *this;
14794 }
14795
14796 DescriptorPoolCreateInfo& setMaxSets( uint32_t maxSets_ )
14797 {
14798 maxSets = maxSets_;
14799 return *this;
14800 }
14801
14802 DescriptorPoolCreateInfo& setPoolSizeCount( uint32_t poolSizeCount_ )
14803 {
14804 poolSizeCount = poolSizeCount_;
14805 return *this;
14806 }
14807
14808 DescriptorPoolCreateInfo& setPPoolSizes( const DescriptorPoolSize* pPoolSizes_ )
14809 {
14810 pPoolSizes = pPoolSizes_;
14811 return *this;
14812 }
14813
14814 operator const VkDescriptorPoolCreateInfo&() const
14815 {
14816 return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>(this);
14817 }
14818
14819 bool operator==( DescriptorPoolCreateInfo const& rhs ) const
14820 {
14821 return ( sType == rhs.sType )
14822 && ( pNext == rhs.pNext )
14823 && ( flags == rhs.flags )
14824 && ( maxSets == rhs.maxSets )
14825 && ( poolSizeCount == rhs.poolSizeCount )
14826 && ( pPoolSizes == rhs.pPoolSizes );
14827 }
14828
14829 bool operator!=( DescriptorPoolCreateInfo const& rhs ) const
14830 {
14831 return !operator==( rhs );
14832 }
14833
14834 private:
14835 StructureType sType;
14836
14837 public:
14838 const void* pNext;
14839 DescriptorPoolCreateFlags flags;
14840 uint32_t maxSets;
14841 uint32_t poolSizeCount;
14842 const DescriptorPoolSize* pPoolSizes;
14843 };
14844 static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
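
  // Usage sketch (illustrative comment only): a small pool that allows individual descriptor
  // sets to be freed. "device" is assumed to be a valid vk::Device.
  //
  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
  //   vk::DescriptorPool pool = device.createDescriptorPool(
  //     vk::DescriptorPoolCreateInfo()
  //       .setFlags( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet )
  //       .setMaxSets( 16 )
  //       .setPoolSizeCount( 1 )
  //       .setPPoolSizes( &poolSize ) );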
14845
14846 enum class DependencyFlagBits
14847 {
14848 eByRegion = VK_DEPENDENCY_BY_REGION_BIT
14849 };
14850
14851 using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
14852
14853 VULKAN_HPP_INLINE DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 )
14854 {
14855 return DependencyFlags( bit0 ) | bit1;
14856 }
14857
14858 VULKAN_HPP_INLINE DependencyFlags operator~( DependencyFlagBits bits )
14859 {
14860 return ~( DependencyFlags( bits ) );
14861 }
14862
14863 template <> struct FlagTraits<DependencyFlagBits>
14864 {
14865 enum
14866 {
14867 allFlags = VkFlags(DependencyFlagBits::eByRegion)
14868 };
14869 };
14870
14871 struct SubpassDependency
14872 {
14873 SubpassDependency( uint32_t srcSubpass_ = 0, uint32_t dstSubpass_ = 0, PipelineStageFlags srcStageMask_ = PipelineStageFlags(), PipelineStageFlags dstStageMask_ = PipelineStageFlags(), AccessFlags srcAccessMask_ = AccessFlags(), AccessFlags dstAccessMask_ = AccessFlags(), DependencyFlags dependencyFlags_ = DependencyFlags() )
14874 : srcSubpass( srcSubpass_ )
14875 , dstSubpass( dstSubpass_ )
14876 , srcStageMask( srcStageMask_ )
14877 , dstStageMask( dstStageMask_ )
14878 , srcAccessMask( srcAccessMask_ )
14879 , dstAccessMask( dstAccessMask_ )
14880 , dependencyFlags( dependencyFlags_ )
14881 {
14882 }
14883
14884 SubpassDependency( VkSubpassDependency const & rhs )
14885 {
14886 memcpy( this, &rhs, sizeof(SubpassDependency) );
14887 }
14888
14889 SubpassDependency& operator=( VkSubpassDependency const & rhs )
14890 {
14891 memcpy( this, &rhs, sizeof(SubpassDependency) );
14892 return *this;
14893 }
14894
14895 SubpassDependency& setSrcSubpass( uint32_t srcSubpass_ )
14896 {
14897 srcSubpass = srcSubpass_;
14898 return *this;
14899 }
14900
14901 SubpassDependency& setDstSubpass( uint32_t dstSubpass_ )
14902 {
14903 dstSubpass = dstSubpass_;
14904 return *this;
14905 }
14906
14907 SubpassDependency& setSrcStageMask( PipelineStageFlags srcStageMask_ )
14908 {
14909 srcStageMask = srcStageMask_;
14910 return *this;
14911 }
14912
14913 SubpassDependency& setDstStageMask( PipelineStageFlags dstStageMask_ )
14914 {
14915 dstStageMask = dstStageMask_;
14916 return *this;
14917 }
14918
14919 SubpassDependency& setSrcAccessMask( AccessFlags srcAccessMask_ )
14920 {
14921 srcAccessMask = srcAccessMask_;
14922 return *this;
14923 }
14924
14925 SubpassDependency& setDstAccessMask( AccessFlags dstAccessMask_ )
14926 {
14927 dstAccessMask = dstAccessMask_;
14928 return *this;
14929 }
14930
14931 SubpassDependency& setDependencyFlags( DependencyFlags dependencyFlags_ )
14932 {
14933 dependencyFlags = dependencyFlags_;
14934 return *this;
14935 }
14936
14937 operator const VkSubpassDependency&() const
14938 {
14939 return *reinterpret_cast<const VkSubpassDependency*>(this);
14940 }
14941
14942 bool operator==( SubpassDependency const& rhs ) const
14943 {
14944 return ( srcSubpass == rhs.srcSubpass )
14945 && ( dstSubpass == rhs.dstSubpass )
14946 && ( srcStageMask == rhs.srcStageMask )
14947 && ( dstStageMask == rhs.dstStageMask )
14948 && ( srcAccessMask == rhs.srcAccessMask )
14949 && ( dstAccessMask == rhs.dstAccessMask )
14950 && ( dependencyFlags == rhs.dependencyFlags );
14951 }
14952
14953 bool operator!=( SubpassDependency const& rhs ) const
14954 {
14955 return !operator==( rhs );
14956 }
14957
14958 uint32_t srcSubpass;
14959 uint32_t dstSubpass;
14960 PipelineStageFlags srcStageMask;
14961 PipelineStageFlags dstStageMask;
14962 AccessFlags srcAccessMask;
14963 AccessFlags dstAccessMask;
14964 DependencyFlags dependencyFlags;
14965 };
14966 static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
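
  // Usage sketch (illustrative comment only): a common dependency from the implicit external
  // subpass into subpass 0, ordering color-attachment writes against the presentation engine.
  //
  //   vk::SubpassDependency dependency = vk::SubpassDependency()
  //     .setSrcSubpass( VK_SUBPASS_EXTERNAL )
  //     .setDstSubpass( 0 )
  //     .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //     .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
  //     .setSrcAccessMask( vk::AccessFlags() )
  //     .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentRead | vk::AccessFlagBits::eColorAttachmentWrite )
  //     .setDependencyFlags( vk::DependencyFlagBits::eByRegion );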
14967
14968 struct RenderPassCreateInfo
14969 {
14970 RenderPassCreateInfo( RenderPassCreateFlags flags_ = RenderPassCreateFlags(), uint32_t attachmentCount_ = 0, const AttachmentDescription* pAttachments_ = nullptr, uint32_t subpassCount_ = 0, const SubpassDescription* pSubpasses_ = nullptr, uint32_t dependencyCount_ = 0, const SubpassDependency* pDependencies_ = nullptr )
14971 : sType( StructureType::eRenderPassCreateInfo )
14972 , pNext( nullptr )
14973 , flags( flags_ )
14974 , attachmentCount( attachmentCount_ )
14975 , pAttachments( pAttachments_ )
14976 , subpassCount( subpassCount_ )
14977 , pSubpasses( pSubpasses_ )
14978 , dependencyCount( dependencyCount_ )
14979 , pDependencies( pDependencies_ )
14980 {
14981 }
14982
14983 RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs )
14984 {
14985 memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
14986 }
14987
14988 RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs )
14989 {
14990 memcpy( this, &rhs, sizeof(RenderPassCreateInfo) );
14991 return *this;
14992 }
14993
14994 RenderPassCreateInfo& setPNext( const void* pNext_ )
14995 {
14996 pNext = pNext_;
14997 return *this;
14998 }
14999
15000 RenderPassCreateInfo& setFlags( RenderPassCreateFlags flags_ )
15001 {
15002 flags = flags_;
15003 return *this;
15004 }
15005
15006 RenderPassCreateInfo& setAttachmentCount( uint32_t attachmentCount_ )
15007 {
15008 attachmentCount = attachmentCount_;
15009 return *this;
15010 }
15011
15012 RenderPassCreateInfo& setPAttachments( const AttachmentDescription* pAttachments_ )
15013 {
15014 pAttachments = pAttachments_;
15015 return *this;
15016 }
15017
15018 RenderPassCreateInfo& setSubpassCount( uint32_t subpassCount_ )
15019 {
15020 subpassCount = subpassCount_;
15021 return *this;
15022 }
15023
15024 RenderPassCreateInfo& setPSubpasses( const SubpassDescription* pSubpasses_ )
15025 {
15026 pSubpasses = pSubpasses_;
15027 return *this;
15028 }
15029
15030 RenderPassCreateInfo& setDependencyCount( uint32_t dependencyCount_ )
15031 {
15032 dependencyCount = dependencyCount_;
15033 return *this;
15034 }
15035
15036 RenderPassCreateInfo& setPDependencies( const SubpassDependency* pDependencies_ )
15037 {
15038 pDependencies = pDependencies_;
15039 return *this;
15040 }
15041
15042 operator const VkRenderPassCreateInfo&() const
15043 {
15044 return *reinterpret_cast<const VkRenderPassCreateInfo*>(this);
15045 }
15046
15047 bool operator==( RenderPassCreateInfo const& rhs ) const
15048 {
15049 return ( sType == rhs.sType )
15050 && ( pNext == rhs.pNext )
15051 && ( flags == rhs.flags )
15052 && ( attachmentCount == rhs.attachmentCount )
15053 && ( pAttachments == rhs.pAttachments )
15054 && ( subpassCount == rhs.subpassCount )
15055 && ( pSubpasses == rhs.pSubpasses )
15056 && ( dependencyCount == rhs.dependencyCount )
15057 && ( pDependencies == rhs.pDependencies );
15058 }
15059
15060 bool operator!=( RenderPassCreateInfo const& rhs ) const
15061 {
15062 return !operator==( rhs );
15063 }
15064
15065 private:
15066 StructureType sType;
15067
15068 public:
15069 const void* pNext;
15070 RenderPassCreateFlags flags;
15071 uint32_t attachmentCount;
15072 const AttachmentDescription* pAttachments;
15073 uint32_t subpassCount;
15074 const SubpassDescription* pSubpasses;
15075 uint32_t dependencyCount;
15076 const SubpassDependency* pDependencies;
15077 };
15078 static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
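
  // Usage sketch (illustrative comment only): tying the pieces together. "colorAttachment",
  // "subpass" (a vk::SubpassDescription) and "dependency" are assumed to be filled in as in the
  // sketches above; "device" is a valid vk::Device.
  //
  //   vk::RenderPass renderPass = device.createRenderPass(
  //     vk::RenderPassCreateInfo()
  //       .setAttachmentCount( 1 )
  //       .setPAttachments( &colorAttachment )
  //       .setSubpassCount( 1 )
  //       .setPSubpasses( &subpass )
  //       .setDependencyCount( 1 )
  //       .setPDependencies( &dependency ) );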
15079
15080 enum class PresentModeKHR
15081 {
15082 eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
15083 eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
15084 eFifo = VK_PRESENT_MODE_FIFO_KHR,
15085 eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR
15086 };
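
  // Usage sketch (illustrative comment only): eFifo is the only mode guaranteed to be supported,
  // so applications usually query the surface and prefer eMailbox when available. "surface" is
  // assumed to be a valid vk::SurfaceKHR.
  //
  //   std::vector<vk::PresentModeKHR> modes = physicalDevice.getSurfacePresentModesKHR( surface );
  //   vk::PresentModeKHR presentMode = vk::PresentModeKHR::eFifo;
  //   if ( std::find( modes.begin(), modes.end(), vk::PresentModeKHR::eMailbox ) != modes.end() )
  //   {
  //     presentMode = vk::PresentModeKHR::eMailbox;
  //   }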
15087
15088 enum class ColorSpaceKHR
15089 {
15090 eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
15091 eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
15092 eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
15093 eScrgbLinearEXT = VK_COLOR_SPACE_SCRGB_LINEAR_EXT,
15094 eScrgbNonlinearEXT = VK_COLOR_SPACE_SCRGB_NONLINEAR_EXT,
15095 eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT,
15096 eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
15097 eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
15098 eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
15099 eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
15100 eBt2020NonlinearEXT = VK_COLOR_SPACE_BT2020_NONLINEAR_EXT,
15101 eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
15102 eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT
15103 };
15104
15105 struct SurfaceFormatKHR
15106 {
15107 operator const VkSurfaceFormatKHR&() const
15108 {
15109 return *reinterpret_cast<const VkSurfaceFormatKHR*>(this);
15110 }
15111
15112 bool operator==( SurfaceFormatKHR const& rhs ) const
15113 {
15114 return ( format == rhs.format )
15115 && ( colorSpace == rhs.colorSpace );
15116 }
15117
15118 bool operator!=( SurfaceFormatKHR const& rhs ) const
15119 {
15120 return !operator==( rhs );
15121 }
15122
15123 Format format;
15124 ColorSpaceKHR colorSpace;
15125 };
15126 static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
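
  // Usage sketch (illustrative comment only): choosing a swapchain format. If the surface
  // reports a single entry with Format::eUndefined the application may pick any format;
  // otherwise it scans the list for a preferred format/color-space pair.
  //
  //   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
  //   vk::SurfaceFormatKHR chosen = formats[0];
  //   for ( const vk::SurfaceFormatKHR& f : formats )
  //   {
  //     if ( ( f.format == vk::Format::eB8G8R8A8Unorm ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
  //     {
  //       chosen = f;
  //     }
  //   }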
15127
15128 enum class DisplayPlaneAlphaFlagBitsKHR
15129 {
15130 eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
15131 eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
15132 ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
15133 ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
15134 };
15135
15136 using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
15137
15138 VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 )
15139 {
15140 return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
15141 }
15142
15143 VULKAN_HPP_INLINE DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits )
15144 {
15145 return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
15146 }
15147
15148 template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
15149 {
15150 enum
15151 {
15152 allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
15153 };
15154 };
15155
15156 struct DisplayPlaneCapabilitiesKHR
15157 {
15158 operator const VkDisplayPlaneCapabilitiesKHR&() const
15159 {
15160 return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>(this);
15161 }
15162
15163 bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const
15164 {
15165 return ( supportedAlpha == rhs.supportedAlpha )
15166 && ( minSrcPosition == rhs.minSrcPosition )
15167 && ( maxSrcPosition == rhs.maxSrcPosition )
15168 && ( minSrcExtent == rhs.minSrcExtent )
15169 && ( maxSrcExtent == rhs.maxSrcExtent )
15170 && ( minDstPosition == rhs.minDstPosition )
15171 && ( maxDstPosition == rhs.maxDstPosition )
15172 && ( minDstExtent == rhs.minDstExtent )
15173 && ( maxDstExtent == rhs.maxDstExtent );
15174 }
15175
15176 bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const
15177 {
15178 return !operator==( rhs );
15179 }
15180
15181 DisplayPlaneAlphaFlagsKHR supportedAlpha;
15182 Offset2D minSrcPosition;
15183 Offset2D maxSrcPosition;
15184 Extent2D minSrcExtent;
15185 Extent2D maxSrcExtent;
15186 Offset2D minDstPosition;
15187 Offset2D maxDstPosition;
15188 Extent2D minDstExtent;
15189 Extent2D maxDstExtent;
15190 };
15191 static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
15192
15193 enum class CompositeAlphaFlagBitsKHR
15194 {
15195 eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
15196 ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
15197 ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
15198 eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
15199 };
15200
15201 using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
15202
15203 VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 )
15204 {
15205 return CompositeAlphaFlagsKHR( bit0 ) | bit1;
15206 }
15207
15208 VULKAN_HPP_INLINE CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits )
15209 {
15210 return ~( CompositeAlphaFlagsKHR( bits ) );
15211 }
15212
15213 template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
15214 {
15215 enum
15216 {
15217 allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
15218 };
15219 };
15220
15221 enum class SurfaceTransformFlagBitsKHR
15222 {
15223 eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
15224 eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
15225 eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
15226 eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
15227 eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
15228 eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
15229 eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
15230 eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
15231 eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
15232 };
15233
15234 using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR, VkSurfaceTransformFlagsKHR>;
15235
15236 VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 )
15237 {
15238 return SurfaceTransformFlagsKHR( bit0 ) | bit1;
15239 }
15240
15241 VULKAN_HPP_INLINE SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits )
15242 {
15243 return ~( SurfaceTransformFlagsKHR( bits ) );
15244 }
15245
15246 template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
15247 {
15248 enum
15249 {
15250 allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
15251 };
15252 };
15253
15254 struct DisplayPropertiesKHR
15255 {
15256 operator const VkDisplayPropertiesKHR&() const
15257 {
15258 return *reinterpret_cast<const VkDisplayPropertiesKHR*>(this);
15259 }
15260
15261 bool operator==( DisplayPropertiesKHR const& rhs ) const
15262 {
15263 return ( display == rhs.display )
15264 && ( displayName == rhs.displayName )
15265 && ( physicalDimensions == rhs.physicalDimensions )
15266 && ( physicalResolution == rhs.physicalResolution )
15267 && ( supportedTransforms == rhs.supportedTransforms )
15268 && ( planeReorderPossible == rhs.planeReorderPossible )
15269 && ( persistentContent == rhs.persistentContent );
15270 }
15271
15272 bool operator!=( DisplayPropertiesKHR const& rhs ) const
15273 {
15274 return !operator==( rhs );
15275 }
15276
15277 DisplayKHR display;
15278 const char* displayName;
15279 Extent2D physicalDimensions;
15280 Extent2D physicalResolution;
15281 SurfaceTransformFlagsKHR supportedTransforms;
15282 Bool32 planeReorderPossible;
15283 Bool32 persistentContent;
15284 };
15285 static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
15286
15287 struct DisplaySurfaceCreateInfoKHR
15288 {
15289 DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = DisplaySurfaceCreateFlagsKHR(), DisplayModeKHR displayMode_ = DisplayModeKHR(), uint32_t planeIndex_ = 0, uint32_t planeStackIndex_ = 0, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = 0, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = Extent2D() )
15290 : sType( StructureType::eDisplaySurfaceCreateInfoKHR )
15291 , pNext( nullptr )
15292 , flags( flags_ )
15293 , displayMode( displayMode_ )
15294 , planeIndex( planeIndex_ )
15295 , planeStackIndex( planeStackIndex_ )
15296 , transform( transform_ )
15297 , globalAlpha( globalAlpha_ )
15298 , alphaMode( alphaMode_ )
15299 , imageExtent( imageExtent_ )
15300 {
15301 }
15302
15303 DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs )
15304 {
15305 memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
15306 }
15307
15308 DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs )
15309 {
15310 memcpy( this, &rhs, sizeof(DisplaySurfaceCreateInfoKHR) );
15311 return *this;
15312 }
15313
15314 DisplaySurfaceCreateInfoKHR& setPNext( const void* pNext_ )
15315 {
15316 pNext = pNext_;
15317 return *this;
15318 }
15319
15320 DisplaySurfaceCreateInfoKHR& setFlags( DisplaySurfaceCreateFlagsKHR flags_ )
15321 {
15322 flags = flags_;
15323 return *this;
15324 }
15325
15326 DisplaySurfaceCreateInfoKHR& setDisplayMode( DisplayModeKHR displayMode_ )
15327 {
15328 displayMode = displayMode_;
15329 return *this;
15330 }
15331
15332 DisplaySurfaceCreateInfoKHR& setPlaneIndex( uint32_t planeIndex_ )
15333 {
15334 planeIndex = planeIndex_;
15335 return *this;
15336 }
15337
15338 DisplaySurfaceCreateInfoKHR& setPlaneStackIndex( uint32_t planeStackIndex_ )
15339 {
15340 planeStackIndex = planeStackIndex_;
15341 return *this;
15342 }
15343
15344 DisplaySurfaceCreateInfoKHR& setTransform( SurfaceTransformFlagBitsKHR transform_ )
15345 {
15346 transform = transform_;
15347 return *this;
15348 }
15349
15350 DisplaySurfaceCreateInfoKHR& setGlobalAlpha( float globalAlpha_ )
15351 {
15352 globalAlpha = globalAlpha_;
15353 return *this;
15354 }
15355
15356 DisplaySurfaceCreateInfoKHR& setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ )
15357 {
15358 alphaMode = alphaMode_;
15359 return *this;
15360 }
15361
15362 DisplaySurfaceCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
15363 {
15364 imageExtent = imageExtent_;
15365 return *this;
15366 }
15367
15368 operator const VkDisplaySurfaceCreateInfoKHR&() const
15369 {
15370 return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>(this);
15371 }
15372
15373 bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const
15374 {
15375 return ( sType == rhs.sType )
15376 && ( pNext == rhs.pNext )
15377 && ( flags == rhs.flags )
15378 && ( displayMode == rhs.displayMode )
15379 && ( planeIndex == rhs.planeIndex )
15380 && ( planeStackIndex == rhs.planeStackIndex )
15381 && ( transform == rhs.transform )
15382 && ( globalAlpha == rhs.globalAlpha )
15383 && ( alphaMode == rhs.alphaMode )
15384 && ( imageExtent == rhs.imageExtent );
15385 }
15386
15387 bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const
15388 {
15389 return !operator==( rhs );
15390 }
15391
15392 private:
15393 StructureType sType;
15394
15395 public:
15396 const void* pNext;
15397 DisplaySurfaceCreateFlagsKHR flags;
15398 DisplayModeKHR displayMode;
15399 uint32_t planeIndex;
15400 uint32_t planeStackIndex;
15401 SurfaceTransformFlagBitsKHR transform;
15402 float globalAlpha;
15403 DisplayPlaneAlphaFlagBitsKHR alphaMode;
15404 Extent2D imageExtent;
15405 };
15406 static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
15407
15408 struct SurfaceCapabilitiesKHR
15409 {
15410 operator const VkSurfaceCapabilitiesKHR&() const
15411 {
15412 return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>(this);
15413 }
15414
15415 bool operator==( SurfaceCapabilitiesKHR const& rhs ) const
15416 {
15417 return ( minImageCount == rhs.minImageCount )
15418 && ( maxImageCount == rhs.maxImageCount )
15419 && ( currentExtent == rhs.currentExtent )
15420 && ( minImageExtent == rhs.minImageExtent )
15421 && ( maxImageExtent == rhs.maxImageExtent )
15422 && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
15423 && ( supportedTransforms == rhs.supportedTransforms )
15424 && ( currentTransform == rhs.currentTransform )
15425 && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
15426 && ( supportedUsageFlags == rhs.supportedUsageFlags );
15427 }
15428
15429 bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const
15430 {
15431 return !operator==( rhs );
15432 }
15433
15434 uint32_t minImageCount;
15435 uint32_t maxImageCount;
15436 Extent2D currentExtent;
15437 Extent2D minImageExtent;
15438 Extent2D maxImageExtent;
15439 uint32_t maxImageArrayLayers;
15440 SurfaceTransformFlagsKHR supportedTransforms;
15441 SurfaceTransformFlagBitsKHR currentTransform;
15442 CompositeAlphaFlagsKHR supportedCompositeAlpha;
15443 ImageUsageFlags supportedUsageFlags;
15444 };
15445 static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
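
  // Usage sketch (illustrative comment only): the surface capabilities drive several swapchain
  // parameters. A maxImageCount of 0 means "no upper limit".
  //
  //   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  //   uint32_t imageCount = caps.minImageCount + 1;
  //   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )
  //   {
  //     imageCount = caps.maxImageCount;
  //   }
  //   vk::SurfaceTransformFlagBitsKHR preTransform =
  //     ( caps.supportedTransforms & vk::SurfaceTransformFlagBitsKHR::eIdentity )
  //       ? vk::SurfaceTransformFlagBitsKHR::eIdentity
  //       : caps.currentTransform;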
15446
15447 struct SwapchainCreateInfoKHR
15448 {
15449 SwapchainCreateInfoKHR( SwapchainCreateFlagsKHR flags_ = SwapchainCreateFlagsKHR(), SurfaceKHR surface_ = SurfaceKHR(), uint32_t minImageCount_ = 0, Format imageFormat_ = Format::eUndefined, ColorSpaceKHR imageColorSpace_ = ColorSpaceKHR::eSrgbNonlinear, Extent2D imageExtent_ = Extent2D(), uint32_t imageArrayLayers_ = 0, ImageUsageFlags imageUsage_ = ImageUsageFlags(), SharingMode imageSharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = 0, const uint32_t* pQueueFamilyIndices_ = nullptr, SurfaceTransformFlagBitsKHR preTransform_ = SurfaceTransformFlagBitsKHR::eIdentity, CompositeAlphaFlagBitsKHR compositeAlpha_ = CompositeAlphaFlagBitsKHR::eOpaque, PresentModeKHR presentMode_ = PresentModeKHR::eImmediate, Bool32 clipped_ = 0, SwapchainKHR oldSwapchain_ = SwapchainKHR() )
15450 : sType( StructureType::eSwapchainCreateInfoKHR )
15451 , pNext( nullptr )
15452 , flags( flags_ )
15453 , surface( surface_ )
15454 , minImageCount( minImageCount_ )
15455 , imageFormat( imageFormat_ )
15456 , imageColorSpace( imageColorSpace_ )
15457 , imageExtent( imageExtent_ )
15458 , imageArrayLayers( imageArrayLayers_ )
15459 , imageUsage( imageUsage_ )
15460 , imageSharingMode( imageSharingMode_ )
15461 , queueFamilyIndexCount( queueFamilyIndexCount_ )
15462 , pQueueFamilyIndices( pQueueFamilyIndices_ )
15463 , preTransform( preTransform_ )
15464 , compositeAlpha( compositeAlpha_ )
15465 , presentMode( presentMode_ )
15466 , clipped( clipped_ )
15467 , oldSwapchain( oldSwapchain_ )
15468 {
15469 }
15470
15471 SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs )
15472 {
15473 memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
15474 }
15475
15476 SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs )
15477 {
15478 memcpy( this, &rhs, sizeof(SwapchainCreateInfoKHR) );
15479 return *this;
15480 }
15481
15482 SwapchainCreateInfoKHR& setPNext( const void* pNext_ )
15483 {
15484 pNext = pNext_;
15485 return *this;
15486 }
15487
15488 SwapchainCreateInfoKHR& setFlags( SwapchainCreateFlagsKHR flags_ )
15489 {
15490 flags = flags_;
15491 return *this;
15492 }
15493
15494 SwapchainCreateInfoKHR& setSurface( SurfaceKHR surface_ )
15495 {
15496 surface = surface_;
15497 return *this;
15498 }
15499
15500 SwapchainCreateInfoKHR& setMinImageCount( uint32_t minImageCount_ )
15501 {
15502 minImageCount = minImageCount_;
15503 return *this;
15504 }
15505
15506 SwapchainCreateInfoKHR& setImageFormat( Format imageFormat_ )
15507 {
15508 imageFormat = imageFormat_;
15509 return *this;
15510 }
15511
15512 SwapchainCreateInfoKHR& setImageColorSpace( ColorSpaceKHR imageColorSpace_ )
15513 {
15514 imageColorSpace = imageColorSpace_;
15515 return *this;
15516 }
15517
15518 SwapchainCreateInfoKHR& setImageExtent( Extent2D imageExtent_ )
15519 {
15520 imageExtent = imageExtent_;
15521 return *this;
15522 }
15523
15524 SwapchainCreateInfoKHR& setImageArrayLayers( uint32_t imageArrayLayers_ )
15525 {
15526 imageArrayLayers = imageArrayLayers_;
15527 return *this;
15528 }
15529
15530 SwapchainCreateInfoKHR& setImageUsage( ImageUsageFlags imageUsage_ )
15531 {
15532 imageUsage = imageUsage_;
15533 return *this;
15534 }
15535
15536 SwapchainCreateInfoKHR& setImageSharingMode( SharingMode imageSharingMode_ )
15537 {
15538 imageSharingMode = imageSharingMode_;
15539 return *this;
15540 }
15541
15542 SwapchainCreateInfoKHR& setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ )
15543 {
15544 queueFamilyIndexCount = queueFamilyIndexCount_;
15545 return *this;
15546 }
15547
15548 SwapchainCreateInfoKHR& setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ )
15549 {
15550 pQueueFamilyIndices = pQueueFamilyIndices_;
15551 return *this;
15552 }
15553
15554 SwapchainCreateInfoKHR& setPreTransform( SurfaceTransformFlagBitsKHR preTransform_ )
15555 {
15556 preTransform = preTransform_;
15557 return *this;
15558 }
15559
15560 SwapchainCreateInfoKHR& setCompositeAlpha( CompositeAlphaFlagBitsKHR compositeAlpha_ )
15561 {
15562 compositeAlpha = compositeAlpha_;
15563 return *this;
15564 }
15565
15566 SwapchainCreateInfoKHR& setPresentMode( PresentModeKHR presentMode_ )
15567 {
15568 presentMode = presentMode_;
15569 return *this;
15570 }
15571
15572 SwapchainCreateInfoKHR& setClipped( Bool32 clipped_ )
15573 {
15574 clipped = clipped_;
15575 return *this;
15576 }
15577
15578 SwapchainCreateInfoKHR& setOldSwapchain( SwapchainKHR oldSwapchain_ )
15579 {
15580 oldSwapchain = oldSwapchain_;
15581 return *this;
15582 }
15583
15584 operator const VkSwapchainCreateInfoKHR&() const
15585 {
15586 return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>(this);
15587 }
15588
15589 bool operator==( SwapchainCreateInfoKHR const& rhs ) const
15590 {
15591 return ( sType == rhs.sType )
15592 && ( pNext == rhs.pNext )
15593 && ( flags == rhs.flags )
15594 && ( surface == rhs.surface )
15595 && ( minImageCount == rhs.minImageCount )
15596 && ( imageFormat == rhs.imageFormat )
15597 && ( imageColorSpace == rhs.imageColorSpace )
15598 && ( imageExtent == rhs.imageExtent )
15599 && ( imageArrayLayers == rhs.imageArrayLayers )
15600 && ( imageUsage == rhs.imageUsage )
15601 && ( imageSharingMode == rhs.imageSharingMode )
15602 && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
15603 && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
15604 && ( preTransform == rhs.preTransform )
15605 && ( compositeAlpha == rhs.compositeAlpha )
15606 && ( presentMode == rhs.presentMode )
15607 && ( clipped == rhs.clipped )
15608 && ( oldSwapchain == rhs.oldSwapchain );
15609 }
15610
15611 bool operator!=( SwapchainCreateInfoKHR const& rhs ) const
15612 {
15613 return !operator==( rhs );
15614 }
15615
15616 private:
15617 StructureType sType;
15618
15619 public:
15620 const void* pNext;
15621 SwapchainCreateFlagsKHR flags;
15622 SurfaceKHR surface;
15623 uint32_t minImageCount;
15624 Format imageFormat;
15625 ColorSpaceKHR imageColorSpace;
15626 Extent2D imageExtent;
15627 uint32_t imageArrayLayers;
15628 ImageUsageFlags imageUsage;
15629 SharingMode imageSharingMode;
15630 uint32_t queueFamilyIndexCount;
15631 const uint32_t* pQueueFamilyIndices;
15632 SurfaceTransformFlagBitsKHR preTransform;
15633 CompositeAlphaFlagBitsKHR compositeAlpha;
15634 PresentModeKHR presentMode;
15635 Bool32 clipped;
15636 SwapchainKHR oldSwapchain;
15637 };
15638 static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
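  // Usage sketch (editorial note, not generated): the fluent setters above can be chained
  // to populate a SwapchainCreateInfoKHR. The surface, format, extent and usage values
  // below are placeholders for data the application has already queried from its surface.
  //
  //   vk::SwapchainCreateInfoKHR swapchainInfo = vk::SwapchainCreateInfoKHR()
  //     .setSurface( surface )
  //     .setMinImageCount( 3 )
  //     .setImageFormat( vk::Format::eB8G8R8A8Unorm )
  //     .setImageExtent( vk::Extent2D( 1280, 720 ) )
  //     .setImageArrayLayers( 1 )
  //     .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
  //     .setImageSharingMode( vk::SharingMode::eExclusive )
  //     .setPresentMode( vk::PresentModeKHR::eFifo )
  //     .setClipped( VK_TRUE );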
15639
15640 enum class DebugReportFlagBitsEXT
15641 {
15642 eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
15643 eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
15644 ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
15645 eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
15646 eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
15647 };
15648
15649 using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
15650
15651 VULKAN_HPP_INLINE DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 )
15652 {
15653 return DebugReportFlagsEXT( bit0 ) | bit1;
15654 }
15655
15656 VULKAN_HPP_INLINE DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits )
15657 {
15658 return ~( DebugReportFlagsEXT( bits ) );
15659 }
15660
15661 template <> struct FlagTraits<DebugReportFlagBitsEXT>
15662 {
15663 enum
15664 {
15665 allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
15666 };
15667 };
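  // Usage sketch (editorial note): individual DebugReportFlagBitsEXT values combine into a
  // DebugReportFlagsEXT mask through the operator| defined above, for example:
  //
  //   vk::DebugReportFlagsEXT reportFlags =
  //     vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning;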
15668
15669 struct DebugReportCallbackCreateInfoEXT
15670 {
15671 DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = DebugReportFlagsEXT(), PFN_vkDebugReportCallbackEXT pfnCallback_ = nullptr, void* pUserData_ = nullptr )
15672 : sType( StructureType::eDebugReportCallbackCreateInfoEXT )
15673 , pNext( nullptr )
15674 , flags( flags_ )
15675 , pfnCallback( pfnCallback_ )
15676 , pUserData( pUserData_ )
15677 {
15678 }
15679
15680 DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs )
15681 {
15682 memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
15683 }
15684
15685 DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs )
15686 {
15687 memcpy( this, &rhs, sizeof(DebugReportCallbackCreateInfoEXT) );
15688 return *this;
15689 }
15690
15691    DebugReportCallbackCreateInfoEXT& setPNext( const void* pNext_ )
15692 {
15693 pNext = pNext_;
15694 return *this;
15695 }
15696
15697 DebugReportCallbackCreateInfoEXT& setFlags( DebugReportFlagsEXT flags_ )
15698 {
15699 flags = flags_;
15700 return *this;
15701 }
15702
15703 DebugReportCallbackCreateInfoEXT& setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ )
15704 {
15705 pfnCallback = pfnCallback_;
15706 return *this;
15707 }
15708
15709 DebugReportCallbackCreateInfoEXT& setPUserData( void* pUserData_ )
15710 {
15711 pUserData = pUserData_;
15712 return *this;
15713 }
15714
15715 operator const VkDebugReportCallbackCreateInfoEXT&() const
15716 {
15717 return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>(this);
15718 }
15719
15720 bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const
15721 {
15722 return ( sType == rhs.sType )
15723 && ( pNext == rhs.pNext )
15724 && ( flags == rhs.flags )
15725 && ( pfnCallback == rhs.pfnCallback )
15726 && ( pUserData == rhs.pUserData );
15727 }
15728
15729 bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const
15730 {
15731 return !operator==( rhs );
15732 }
15733
15734 private:
15735 StructureType sType;
15736
15737 public:
15738 const void* pNext;
15739 DebugReportFlagsEXT flags;
15740 PFN_vkDebugReportCallbackEXT pfnCallback;
15741 void* pUserData;
15742 };
15743 static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
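  // Usage sketch (editorial note): filling a create-info for VK_EXT_debug_report, assuming
  // a user-provided function `myDebugCallback` with the PFN_vkDebugReportCallbackEXT
  // signature; the instance-level call that consumes this struct is not part of this excerpt.
  //
  //   vk::DebugReportCallbackCreateInfoEXT callbackInfo = vk::DebugReportCallbackCreateInfoEXT()
  //     .setFlags( vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning )
  //     .setPfnCallback( &myDebugCallback );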
15744
15745 enum class DebugReportObjectTypeEXT
15746 {
15747 eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
15748 eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
15749 ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
15750 eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
15751 eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
15752 eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
15753 eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
15754 eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
15755 eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
15756 eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
15757 eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
15758 eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
15759 eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
15760 eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
15761 eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
15762 eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
15763 ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
15764 ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
15765 eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
15766 ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
15767 eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
15768 eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
15769 eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
15770 eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
15771 eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
15772 eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
15773 eSurfaceKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
15774 eSwapchainKhr = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
15775 eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
15776 eDisplayKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
15777 eDisplayModeKhr = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
15778 eObjectTableNvx = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT,
15779 eIndirectCommandsLayoutNvx = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT
15780 };
15781
15782 struct DebugMarkerObjectNameInfoEXT
15783 {
15784 DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, const char* pObjectName_ = nullptr )
15785 : sType( StructureType::eDebugMarkerObjectNameInfoEXT )
15786 , pNext( nullptr )
15787 , objectType( objectType_ )
15788 , object( object_ )
15789 , pObjectName( pObjectName_ )
15790 {
15791 }
15792
15793 DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs )
15794 {
15795 memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
15796 }
15797
15798 DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs )
15799 {
15800 memcpy( this, &rhs, sizeof(DebugMarkerObjectNameInfoEXT) );
15801 return *this;
15802 }
15803
15804    DebugMarkerObjectNameInfoEXT& setPNext( const void* pNext_ )
15805 {
15806 pNext = pNext_;
15807 return *this;
15808 }
15809
15810 DebugMarkerObjectNameInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
15811 {
15812 objectType = objectType_;
15813 return *this;
15814 }
15815
15816 DebugMarkerObjectNameInfoEXT& setObject( uint64_t object_ )
15817 {
15818 object = object_;
15819 return *this;
15820 }
15821
15822 DebugMarkerObjectNameInfoEXT& setPObjectName( const char* pObjectName_ )
15823 {
15824 pObjectName = pObjectName_;
15825 return *this;
15826 }
15827
15828 operator const VkDebugMarkerObjectNameInfoEXT&() const
15829 {
15830 return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>(this);
15831 }
15832
15833 bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const
15834 {
15835 return ( sType == rhs.sType )
15836 && ( pNext == rhs.pNext )
15837 && ( objectType == rhs.objectType )
15838 && ( object == rhs.object )
15839 && ( pObjectName == rhs.pObjectName );
15840 }
15841
15842 bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const
15843 {
15844 return !operator==( rhs );
15845 }
15846
15847 private:
15848 StructureType sType;
15849
15850 public:
15851 const void* pNext;
15852 DebugReportObjectTypeEXT objectType;
15853 uint64_t object;
15854 const char* pObjectName;
15855 };
15856 static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
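  // Usage sketch (editorial note): naming an object for VK_EXT_debug_marker. `objectHandle`
  // is a placeholder for the raw uint64_t handle value of the object being labelled.
  //
  //   vk::DebugMarkerObjectNameInfoEXT nameInfo = vk::DebugMarkerObjectNameInfoEXT()
  //     .setObjectType( vk::DebugReportObjectTypeEXT::eImage )
  //     .setObject( objectHandle )
  //     .setPObjectName( "scene color target" );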
15857
15858 struct DebugMarkerObjectTagInfoEXT
15859 {
15860 DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = 0, uint64_t tagName_ = 0, size_t tagSize_ = 0, const void* pTag_ = nullptr )
15861 : sType( StructureType::eDebugMarkerObjectTagInfoEXT )
15862 , pNext( nullptr )
15863 , objectType( objectType_ )
15864 , object( object_ )
15865 , tagName( tagName_ )
15866 , tagSize( tagSize_ )
15867 , pTag( pTag_ )
15868 {
15869 }
15870
15871 DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs )
15872 {
15873 memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
15874 }
15875
15876 DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs )
15877 {
15878 memcpy( this, &rhs, sizeof(DebugMarkerObjectTagInfoEXT) );
15879 return *this;
15880 }
15881
15882    DebugMarkerObjectTagInfoEXT& setPNext( const void* pNext_ )
15883 {
15884 pNext = pNext_;
15885 return *this;
15886 }
15887
15888 DebugMarkerObjectTagInfoEXT& setObjectType( DebugReportObjectTypeEXT objectType_ )
15889 {
15890 objectType = objectType_;
15891 return *this;
15892 }
15893
15894 DebugMarkerObjectTagInfoEXT& setObject( uint64_t object_ )
15895 {
15896 object = object_;
15897 return *this;
15898 }
15899
15900 DebugMarkerObjectTagInfoEXT& setTagName( uint64_t tagName_ )
15901 {
15902 tagName = tagName_;
15903 return *this;
15904 }
15905
15906 DebugMarkerObjectTagInfoEXT& setTagSize( size_t tagSize_ )
15907 {
15908 tagSize = tagSize_;
15909 return *this;
15910 }
15911
15912 DebugMarkerObjectTagInfoEXT& setPTag( const void* pTag_ )
15913 {
15914 pTag = pTag_;
15915 return *this;
15916 }
15917
15918 operator const VkDebugMarkerObjectTagInfoEXT&() const
15919 {
15920 return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>(this);
15921 }
15922
15923 bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const
15924 {
15925 return ( sType == rhs.sType )
15926 && ( pNext == rhs.pNext )
15927 && ( objectType == rhs.objectType )
15928 && ( object == rhs.object )
15929 && ( tagName == rhs.tagName )
15930 && ( tagSize == rhs.tagSize )
15931 && ( pTag == rhs.pTag );
15932 }
15933
15934 bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const
15935 {
15936 return !operator==( rhs );
15937 }
15938
15939 private:
15940 StructureType sType;
15941
15942 public:
15943 const void* pNext;
15944 DebugReportObjectTypeEXT objectType;
15945 uint64_t object;
15946 uint64_t tagName;
15947 size_t tagSize;
15948 const void* pTag;
15949 };
15950 static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
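  // Usage sketch (editorial note): attaching an opaque tag to an object. `objectHandle` is
  // a placeholder raw handle; the tag payload is any application-defined blob.
  //
  //   const uint32_t tagData[] = { 1, 2, 3 };
  //   vk::DebugMarkerObjectTagInfoEXT tagInfo = vk::DebugMarkerObjectTagInfoEXT()
  //     .setObjectType( vk::DebugReportObjectTypeEXT::eBuffer )
  //     .setObject( objectHandle )
  //     .setTagName( 0x1 )
  //     .setTagSize( sizeof( tagData ) )
  //     .setPTag( tagData );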
15951
15952 enum class DebugReportErrorEXT
15953 {
15954 eNone = VK_DEBUG_REPORT_ERROR_NONE_EXT,
15955 eCallbackRef = VK_DEBUG_REPORT_ERROR_CALLBACK_REF_EXT
15956 };
15957
15958 enum class RasterizationOrderAMD
15959 {
15960 eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
15961 eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
15962 };
15963
15964 struct PipelineRasterizationStateRasterizationOrderAMD
15965 {
15966 PipelineRasterizationStateRasterizationOrderAMD( RasterizationOrderAMD rasterizationOrder_ = RasterizationOrderAMD::eStrict )
15967 : sType( StructureType::ePipelineRasterizationStateRasterizationOrderAMD )
15968 , pNext( nullptr )
15969 , rasterizationOrder( rasterizationOrder_ )
15970 {
15971 }
15972
15973 PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
15974 {
15975 memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
15976 }
15977
15978 PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs )
15979 {
15980 memcpy( this, &rhs, sizeof(PipelineRasterizationStateRasterizationOrderAMD) );
15981 return *this;
15982 }
15983
15984    PipelineRasterizationStateRasterizationOrderAMD& setPNext( const void* pNext_ )
15985 {
15986 pNext = pNext_;
15987 return *this;
15988 }
15989
15990 PipelineRasterizationStateRasterizationOrderAMD& setRasterizationOrder( RasterizationOrderAMD rasterizationOrder_ )
15991 {
15992 rasterizationOrder = rasterizationOrder_;
15993 return *this;
15994 }
15995
15996 operator const VkPipelineRasterizationStateRasterizationOrderAMD&() const
15997 {
15998 return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>(this);
15999 }
16000
16001 bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
16002 {
16003 return ( sType == rhs.sType )
16004 && ( pNext == rhs.pNext )
16005 && ( rasterizationOrder == rhs.rasterizationOrder );
16006 }
16007
16008 bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const
16009 {
16010 return !operator==( rhs );
16011 }
16012
16013 private:
16014 StructureType sType;
16015
16016 public:
16017 const void* pNext;
16018 RasterizationOrderAMD rasterizationOrder;
16019 };
16020 static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
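  // Usage sketch (editorial note): with VK_AMD_rasterization_order enabled, this struct is
  // chained into the pNext of a pipeline rasterization state create info; `rasterizationState`
  // below is a placeholder for such a create info owned by the application.
  //
  //   vk::PipelineRasterizationStateRasterizationOrderAMD rasterOrder =
  //     vk::PipelineRasterizationStateRasterizationOrderAMD()
  //       .setRasterizationOrder( vk::RasterizationOrderAMD::eRelaxed );
  //   rasterizationState.setPNext( &rasterOrder );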
16021
16022 enum class ExternalMemoryHandleTypeFlagBitsNV
16023 {
16024 eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
16025 eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
16026 eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
16027 eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
16028 };
16029
16030 using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
16031
16032 VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 )
16033 {
16034 return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
16035 }
16036
16037 VULKAN_HPP_INLINE ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits )
16038 {
16039 return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
16040 }
16041
16042 template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
16043 {
16044 enum
16045 {
16046 allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
16047 };
16048 };
16049
16050 struct ExternalMemoryImageCreateInfoNV
16051 {
16052 ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
16053 : sType( StructureType::eExternalMemoryImageCreateInfoNV )
16054 , pNext( nullptr )
16055 , handleTypes( handleTypes_ )
16056 {
16057 }
16058
16059 ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs )
16060 {
16061 memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
16062 }
16063
16064 ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs )
16065 {
16066 memcpy( this, &rhs, sizeof(ExternalMemoryImageCreateInfoNV) );
16067 return *this;
16068 }
16069
16070    ExternalMemoryImageCreateInfoNV& setPNext( const void* pNext_ )
16071 {
16072 pNext = pNext_;
16073 return *this;
16074 }
16075
16076 ExternalMemoryImageCreateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
16077 {
16078 handleTypes = handleTypes_;
16079 return *this;
16080 }
16081
16082 operator const VkExternalMemoryImageCreateInfoNV&() const
16083 {
16084 return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>(this);
16085 }
16086
16087 bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const
16088 {
16089 return ( sType == rhs.sType )
16090 && ( pNext == rhs.pNext )
16091 && ( handleTypes == rhs.handleTypes );
16092 }
16093
16094 bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const
16095 {
16096 return !operator==( rhs );
16097 }
16098
16099 private:
16100 StructureType sType;
16101
16102 public:
16103 const void* pNext;
16104 ExternalMemoryHandleTypeFlagsNV handleTypes;
16105 };
16106 static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
16107
16108 struct ExportMemoryAllocateInfoNV
16109 {
16110 ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = ExternalMemoryHandleTypeFlagsNV() )
16111 : sType( StructureType::eExportMemoryAllocateInfoNV )
16112 , pNext( nullptr )
16113 , handleTypes( handleTypes_ )
16114 {
16115 }
16116
16117 ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs )
16118 {
16119 memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
16120 }
16121
16122 ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs )
16123 {
16124 memcpy( this, &rhs, sizeof(ExportMemoryAllocateInfoNV) );
16125 return *this;
16126 }
16127
16128    ExportMemoryAllocateInfoNV& setPNext( const void* pNext_ )
16129 {
16130 pNext = pNext_;
16131 return *this;
16132 }
16133
16134 ExportMemoryAllocateInfoNV& setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ )
16135 {
16136 handleTypes = handleTypes_;
16137 return *this;
16138 }
16139
16140 operator const VkExportMemoryAllocateInfoNV&() const
16141 {
16142 return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>(this);
16143 }
16144
16145 bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const
16146 {
16147 return ( sType == rhs.sType )
16148 && ( pNext == rhs.pNext )
16149 && ( handleTypes == rhs.handleTypes );
16150 }
16151
16152 bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const
16153 {
16154 return !operator==( rhs );
16155 }
16156
16157 private:
16158 StructureType sType;
16159
16160 public:
16161 const void* pNext;
16162 ExternalMemoryHandleTypeFlagsNV handleTypes;
16163 };
16164 static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
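  // Usage sketch (editorial note): VK_NV_external_memory pairs ExternalMemoryImageCreateInfoNV
  // (chained into the image create info) with ExportMemoryAllocateInfoNV (chained into the
  // memory allocation), typically using the same handle-type mask.
  //
  //   vk::ExternalMemoryHandleTypeFlagsNV handleTypes =
  //     vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32;
  //   vk::ExternalMemoryImageCreateInfoNV externalImageInfo =
  //     vk::ExternalMemoryImageCreateInfoNV().setHandleTypes( handleTypes );
  //   vk::ExportMemoryAllocateInfoNV exportInfo =
  //     vk::ExportMemoryAllocateInfoNV().setHandleTypes( handleTypes );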
16165
16166#ifdef VK_USE_PLATFORM_WIN32_KHR
16167 struct ImportMemoryWin32HandleInfoNV
16168 {
16169 ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = ExternalMemoryHandleTypeFlagsNV(), HANDLE handle_ = 0 )
16170 : sType( StructureType::eImportMemoryWin32HandleInfoNV )
16171 , pNext( nullptr )
16172 , handleType( handleType_ )
16173 , handle( handle_ )
16174 {
16175 }
16176
16177 ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs )
16178 {
16179 memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
16180 }
16181
16182 ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs )
16183 {
16184 memcpy( this, &rhs, sizeof(ImportMemoryWin32HandleInfoNV) );
16185 return *this;
16186 }
16187
16188    ImportMemoryWin32HandleInfoNV& setPNext( const void* pNext_ )
16189 {
16190 pNext = pNext_;
16191 return *this;
16192 }
16193
16194 ImportMemoryWin32HandleInfoNV& setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ )
16195 {
16196 handleType = handleType_;
16197 return *this;
16198 }
16199
16200 ImportMemoryWin32HandleInfoNV& setHandle( HANDLE handle_ )
16201 {
16202 handle = handle_;
16203 return *this;
16204 }
16205
16206 operator const VkImportMemoryWin32HandleInfoNV&() const
16207 {
16208 return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>(this);
16209 }
16210
16211 bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const
16212 {
16213 return ( sType == rhs.sType )
16214 && ( pNext == rhs.pNext )
16215 && ( handleType == rhs.handleType )
16216 && ( handle == rhs.handle );
16217 }
16218
16219 bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const
16220 {
16221 return !operator==( rhs );
16222 }
16223
16224 private:
16225 StructureType sType;
16226
16227 public:
16228 const void* pNext;
16229 ExternalMemoryHandleTypeFlagsNV handleType;
16230 HANDLE handle;
16231 };
16232 static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
16233#endif /*VK_USE_PLATFORM_WIN32_KHR*/
16234
16235 enum class ExternalMemoryFeatureFlagBitsNV
16236 {
16237 eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
16238 eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
16239 eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
16240 };
16241
16242 using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
16243
16244 VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 )
16245 {
16246 return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
16247 }
16248
16249 VULKAN_HPP_INLINE ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits )
16250 {
16251 return ~( ExternalMemoryFeatureFlagsNV( bits ) );
16252 }
16253
16254 template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
16255 {
16256 enum
16257 {
16258 allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
16259 };
16260 };
16261
16262 struct ExternalImageFormatPropertiesNV
16263 {
16264 operator const VkExternalImageFormatPropertiesNV&() const
16265 {
16266 return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>(this);
16267 }
16268
16269 bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const
16270 {
16271 return ( imageFormatProperties == rhs.imageFormatProperties )
16272 && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
16273 && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
16274 && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
16275 }
16276
16277 bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const
16278 {
16279 return !operator==( rhs );
16280 }
16281
16282 ImageFormatProperties imageFormatProperties;
16283 ExternalMemoryFeatureFlagsNV externalMemoryFeatures;
16284 ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes;
16285 ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes;
16286 };
16287 static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
16288
16289 enum class ValidationCheckEXT
16290 {
16291 eAll = VK_VALIDATION_CHECK_ALL_EXT
16292 };
16293
16294 struct ValidationFlagsEXT
16295 {
16296 ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = 0, ValidationCheckEXT* pDisabledValidationChecks_ = nullptr )
16297 : sType( StructureType::eValidationFlagsEXT )
16298 , pNext( nullptr )
16299 , disabledValidationCheckCount( disabledValidationCheckCount_ )
16300 , pDisabledValidationChecks( pDisabledValidationChecks_ )
16301 {
16302 }
16303
16304 ValidationFlagsEXT( VkValidationFlagsEXT const & rhs )
16305 {
16306 memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
16307 }
16308
16309 ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs )
16310 {
16311 memcpy( this, &rhs, sizeof(ValidationFlagsEXT) );
16312 return *this;
16313 }
16314
16315    ValidationFlagsEXT& setPNext( const void* pNext_ )
16316 {
16317 pNext = pNext_;
16318 return *this;
16319 }
16320
16321 ValidationFlagsEXT& setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ )
16322 {
16323 disabledValidationCheckCount = disabledValidationCheckCount_;
16324 return *this;
16325 }
16326
16327 ValidationFlagsEXT& setPDisabledValidationChecks( ValidationCheckEXT* pDisabledValidationChecks_ )
16328 {
16329 pDisabledValidationChecks = pDisabledValidationChecks_;
16330 return *this;
16331 }
16332
16333 operator const VkValidationFlagsEXT&() const
16334 {
16335 return *reinterpret_cast<const VkValidationFlagsEXT*>(this);
16336 }
16337
16338 bool operator==( ValidationFlagsEXT const& rhs ) const
16339 {
16340 return ( sType == rhs.sType )
16341 && ( pNext == rhs.pNext )
16342 && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
16343 && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
16344 }
16345
16346 bool operator!=( ValidationFlagsEXT const& rhs ) const
16347 {
16348 return !operator==( rhs );
16349 }
16350
16351 private:
16352 StructureType sType;
16353
16354 public:
16355 const void* pNext;
16356 uint32_t disabledValidationCheckCount;
16357 ValidationCheckEXT* pDisabledValidationChecks;
16358 };
16359 static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
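  // Usage sketch (editorial note): VK_EXT_validation_flags disables validation checks at
  // instance creation time; the struct is chained into InstanceCreateInfo::pNext.
  //
  //   vk::ValidationCheckEXT disabledChecks[] = { vk::ValidationCheckEXT::eAll };
  //   vk::ValidationFlagsEXT validationFlags = vk::ValidationFlagsEXT()
  //     .setDisabledValidationCheckCount( 1 )
  //     .setPDisabledValidationChecks( disabledChecks );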
16360
16361 enum class IndirectCommandsLayoutUsageFlagBitsNVX
16362 {
16363 eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX,
16364 eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX,
16365 eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX,
16366 eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX
16367 };
16368
16369 using IndirectCommandsLayoutUsageFlagsNVX = Flags<IndirectCommandsLayoutUsageFlagBitsNVX, VkIndirectCommandsLayoutUsageFlagsNVX>;
16370
16371 VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 )
16372 {
16373 return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1;
16374 }
16375
16376 VULKAN_HPP_INLINE IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits )
16377 {
16378 return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) );
16379 }
16380
16381 template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNVX>
16382 {
16383 enum
16384 {
16385 allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences)
16386 };
16387 };
16388
16389 enum class ObjectEntryUsageFlagBitsNVX
16390 {
16391 eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX,
16392 eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX
16393 };
16394
16395 using ObjectEntryUsageFlagsNVX = Flags<ObjectEntryUsageFlagBitsNVX, VkObjectEntryUsageFlagsNVX>;
16396
16397 VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 )
16398 {
16399 return ObjectEntryUsageFlagsNVX( bit0 ) | bit1;
16400 }
16401
16402 VULKAN_HPP_INLINE ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits )
16403 {
16404 return ~( ObjectEntryUsageFlagsNVX( bits ) );
16405 }
16406
16407 template <> struct FlagTraits<ObjectEntryUsageFlagBitsNVX>
16408 {
16409 enum
16410 {
16411 allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute)
16412 };
16413 };
16414
16415 enum class IndirectCommandsTokenTypeNVX
16416 {
16417 eVkIndirectCommandsTokenPipeline = VK_INDIRECT_COMMANDS_TOKEN_PIPELINE_NVX,
16418 eVkIndirectCommandsTokenDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_DESCRIPTOR_SET_NVX,
16419 eVkIndirectCommandsTokenIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_INDEX_BUFFER_NVX,
16420 eVkIndirectCommandsTokenVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_VERTEX_BUFFER_NVX,
16421 eVkIndirectCommandsTokenPushConstant = VK_INDIRECT_COMMANDS_TOKEN_PUSH_CONSTANT_NVX,
16422 eVkIndirectCommandsTokenDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_DRAW_INDEXED_NVX,
16423 eVkIndirectCommandsTokenDraw = VK_INDIRECT_COMMANDS_TOKEN_DRAW_NVX,
16424 eVkIndirectCommandsTokenDispatch = VK_INDIRECT_COMMANDS_TOKEN_DISPATCH_NVX
16425 };
16426
16427 struct IndirectCommandsTokenNVX
16428 {
16429 IndirectCommandsTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, Buffer buffer_ = Buffer(), DeviceSize offset_ = 0 )
16430 : tokenType( tokenType_ )
16431 , buffer( buffer_ )
16432 , offset( offset_ )
16433 {
16434 }
16435
16436 IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs )
16437 {
16438 memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
16439 }
16440
16441 IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs )
16442 {
16443 memcpy( this, &rhs, sizeof(IndirectCommandsTokenNVX) );
16444 return *this;
16445 }
16446
16447 IndirectCommandsTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
16448 {
16449 tokenType = tokenType_;
16450 return *this;
16451 }
16452
16453 IndirectCommandsTokenNVX& setBuffer( Buffer buffer_ )
16454 {
16455 buffer = buffer_;
16456 return *this;
16457 }
16458
16459 IndirectCommandsTokenNVX& setOffset( DeviceSize offset_ )
16460 {
16461 offset = offset_;
16462 return *this;
16463 }
16464
16465 operator const VkIndirectCommandsTokenNVX&() const
16466 {
16467 return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>(this);
16468 }
16469
16470 bool operator==( IndirectCommandsTokenNVX const& rhs ) const
16471 {
16472 return ( tokenType == rhs.tokenType )
16473 && ( buffer == rhs.buffer )
16474 && ( offset == rhs.offset );
16475 }
16476
16477 bool operator!=( IndirectCommandsTokenNVX const& rhs ) const
16478 {
16479 return !operator==( rhs );
16480 }
16481
16482 IndirectCommandsTokenTypeNVX tokenType;
16483 Buffer buffer;
16484 DeviceSize offset;
16485 };
16486 static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
16487
16488 struct IndirectCommandsLayoutTokenNVX
16489 {
16490 IndirectCommandsLayoutTokenNVX( IndirectCommandsTokenTypeNVX tokenType_ = IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline, uint32_t bindingUnit_ = 0, uint32_t dynamicCount_ = 0, uint32_t divisor_ = 0 )
16491 : tokenType( tokenType_ )
16492 , bindingUnit( bindingUnit_ )
16493 , dynamicCount( dynamicCount_ )
16494 , divisor( divisor_ )
16495 {
16496 }
16497
16498 IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs )
16499 {
16500 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
16501 }
16502
16503 IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs )
16504 {
16505 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutTokenNVX) );
16506 return *this;
16507 }
16508
16509 IndirectCommandsLayoutTokenNVX& setTokenType( IndirectCommandsTokenTypeNVX tokenType_ )
16510 {
16511 tokenType = tokenType_;
16512 return *this;
16513 }
16514
16515 IndirectCommandsLayoutTokenNVX& setBindingUnit( uint32_t bindingUnit_ )
16516 {
16517 bindingUnit = bindingUnit_;
16518 return *this;
16519 }
16520
16521 IndirectCommandsLayoutTokenNVX& setDynamicCount( uint32_t dynamicCount_ )
16522 {
16523 dynamicCount = dynamicCount_;
16524 return *this;
16525 }
16526
16527 IndirectCommandsLayoutTokenNVX& setDivisor( uint32_t divisor_ )
16528 {
16529 divisor = divisor_;
16530 return *this;
16531 }
16532
16533 operator const VkIndirectCommandsLayoutTokenNVX&() const
16534 {
16535 return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNVX*>(this);
16536 }
16537
16538 bool operator==( IndirectCommandsLayoutTokenNVX const& rhs ) const
16539 {
16540 return ( tokenType == rhs.tokenType )
16541 && ( bindingUnit == rhs.bindingUnit )
16542 && ( dynamicCount == rhs.dynamicCount )
16543 && ( divisor == rhs.divisor );
16544 }
16545
16546 bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const
16547 {
16548 return !operator==( rhs );
16549 }
16550
16551 IndirectCommandsTokenTypeNVX tokenType;
16552 uint32_t bindingUnit;
16553 uint32_t dynamicCount;
16554 uint32_t divisor;
16555 };
16556 static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" );
16557
16558 struct IndirectCommandsLayoutCreateInfoNVX
16559 {
16560 IndirectCommandsLayoutCreateInfoNVX( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, IndirectCommandsLayoutUsageFlagsNVX flags_ = IndirectCommandsLayoutUsageFlagsNVX(), uint32_t tokenCount_ = 0, const IndirectCommandsLayoutTokenNVX* pTokens_ = nullptr )
16561 : sType( StructureType::eIndirectCommandsLayoutCreateInfoNVX )
16562 , pNext( nullptr )
16563 , pipelineBindPoint( pipelineBindPoint_ )
16564 , flags( flags_ )
16565 , tokenCount( tokenCount_ )
16566 , pTokens( pTokens_ )
16567 {
16568 }
16569
16570 IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
16571 {
16572 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
16573 }
16574
16575 IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs )
16576 {
16577 memcpy( this, &rhs, sizeof(IndirectCommandsLayoutCreateInfoNVX) );
16578 return *this;
16579 }
16580
16581    IndirectCommandsLayoutCreateInfoNVX& setPNext( const void* pNext_ )
16582 {
16583 pNext = pNext_;
16584 return *this;
16585 }
16586
16587 IndirectCommandsLayoutCreateInfoNVX& setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ )
16588 {
16589 pipelineBindPoint = pipelineBindPoint_;
16590 return *this;
16591 }
16592
16593 IndirectCommandsLayoutCreateInfoNVX& setFlags( IndirectCommandsLayoutUsageFlagsNVX flags_ )
16594 {
16595 flags = flags_;
16596 return *this;
16597 }
16598
16599 IndirectCommandsLayoutCreateInfoNVX& setTokenCount( uint32_t tokenCount_ )
16600 {
16601 tokenCount = tokenCount_;
16602 return *this;
16603 }
16604
16605 IndirectCommandsLayoutCreateInfoNVX& setPTokens( const IndirectCommandsLayoutTokenNVX* pTokens_ )
16606 {
16607 pTokens = pTokens_;
16608 return *this;
16609 }
16610
16611 operator const VkIndirectCommandsLayoutCreateInfoNVX&() const
16612 {
16613 return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>(this);
16614 }
16615
16616 bool operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
16617 {
16618 return ( sType == rhs.sType )
16619 && ( pNext == rhs.pNext )
16620 && ( pipelineBindPoint == rhs.pipelineBindPoint )
16621 && ( flags == rhs.flags )
16622 && ( tokenCount == rhs.tokenCount )
16623 && ( pTokens == rhs.pTokens );
16624 }
16625
16626 bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const
16627 {
16628 return !operator==( rhs );
16629 }
16630
16631 private:
16632 StructureType sType;
16633
16634 public:
16635 const void* pNext;
16636 PipelineBindPoint pipelineBindPoint;
16637 IndirectCommandsLayoutUsageFlagsNVX flags;
16638 uint32_t tokenCount;
16639 const IndirectCommandsLayoutTokenNVX* pTokens;
16640 };
16641 static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
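  // Usage sketch (editorial note): an indirect commands layout for
  // VK_NVX_device_generated_commands is described by an ordered list of tokens; a single
  // draw token is shown as a minimal example.
  //
  //   vk::IndirectCommandsLayoutTokenNVX drawToken = vk::IndirectCommandsLayoutTokenNVX()
  //     .setTokenType( vk::IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDraw );
  //   vk::IndirectCommandsLayoutCreateInfoNVX layoutInfo = vk::IndirectCommandsLayoutCreateInfoNVX()
  //     .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //     .setFlags( vk::IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences )
  //     .setTokenCount( 1 )
  //     .setPTokens( &drawToken );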
16642
16643 enum class ObjectEntryTypeNVX
16644 {
16645 eVkObjectEntryDescriptorSet = VK_OBJECT_ENTRY_DESCRIPTOR_SET_NVX,
16646 eVkObjectEntryPipeline = VK_OBJECT_ENTRY_PIPELINE_NVX,
16647 eVkObjectEntryIndexBuffer = VK_OBJECT_ENTRY_INDEX_BUFFER_NVX,
16648 eVkObjectEntryVertexBuffer = VK_OBJECT_ENTRY_VERTEX_BUFFER_NVX,
16649 eVkObjectEntryPushConstant = VK_OBJECT_ENTRY_PUSH_CONSTANT_NVX
16650 };
16651
16652 struct ObjectTableCreateInfoNVX
16653 {
16654 ObjectTableCreateInfoNVX( uint32_t objectCount_ = 0, const ObjectEntryTypeNVX* pObjectEntryTypes_ = nullptr, const uint32_t* pObjectEntryCounts_ = nullptr, const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = nullptr, uint32_t maxUniformBuffersPerDescriptor_ = 0, uint32_t maxStorageBuffersPerDescriptor_ = 0, uint32_t maxStorageImagesPerDescriptor_ = 0, uint32_t maxSampledImagesPerDescriptor_ = 0, uint32_t maxPipelineLayouts_ = 0 )
16655 : sType( StructureType::eObjectTableCreateInfoNVX )
16656 , pNext( nullptr )
16657 , objectCount( objectCount_ )
16658 , pObjectEntryTypes( pObjectEntryTypes_ )
16659 , pObjectEntryCounts( pObjectEntryCounts_ )
16660 , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ )
16661 , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ )
16662 , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ )
16663 , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ )
16664 , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ )
16665 , maxPipelineLayouts( maxPipelineLayouts_ )
16666 {
16667 }
16668
16669 ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs )
16670 {
16671 memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
16672 }
16673
16674 ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs )
16675 {
16676 memcpy( this, &rhs, sizeof(ObjectTableCreateInfoNVX) );
16677 return *this;
16678 }
16679
16680    ObjectTableCreateInfoNVX& setPNext( const void* pNext_ )
16681 {
16682 pNext = pNext_;
16683 return *this;
16684 }
16685
16686 ObjectTableCreateInfoNVX& setObjectCount( uint32_t objectCount_ )
16687 {
16688 objectCount = objectCount_;
16689 return *this;
16690 }
16691
16692 ObjectTableCreateInfoNVX& setPObjectEntryTypes( const ObjectEntryTypeNVX* pObjectEntryTypes_ )
16693 {
16694 pObjectEntryTypes = pObjectEntryTypes_;
16695 return *this;
16696 }
16697
16698 ObjectTableCreateInfoNVX& setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ )
16699 {
16700 pObjectEntryCounts = pObjectEntryCounts_;
16701 return *this;
16702 }
16703
16704 ObjectTableCreateInfoNVX& setPObjectEntryUsageFlags( const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ )
16705 {
16706 pObjectEntryUsageFlags = pObjectEntryUsageFlags_;
16707 return *this;
16708 }
16709
16710 ObjectTableCreateInfoNVX& setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ )
16711 {
16712 maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_;
16713 return *this;
16714 }
16715
16716 ObjectTableCreateInfoNVX& setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ )
16717 {
16718 maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_;
16719 return *this;
16720 }
16721
16722 ObjectTableCreateInfoNVX& setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ )
16723 {
16724 maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_;
16725 return *this;
16726 }
16727
16728 ObjectTableCreateInfoNVX& setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ )
16729 {
16730 maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_;
16731 return *this;
16732 }
16733
16734 ObjectTableCreateInfoNVX& setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ )
16735 {
16736 maxPipelineLayouts = maxPipelineLayouts_;
16737 return *this;
16738 }
16739
16740 operator const VkObjectTableCreateInfoNVX&() const
16741 {
16742 return *reinterpret_cast<const VkObjectTableCreateInfoNVX*>(this);
16743 }
16744
16745 bool operator==( ObjectTableCreateInfoNVX const& rhs ) const
16746 {
16747 return ( sType == rhs.sType )
16748 && ( pNext == rhs.pNext )
16749 && ( objectCount == rhs.objectCount )
16750 && ( pObjectEntryTypes == rhs.pObjectEntryTypes )
16751 && ( pObjectEntryCounts == rhs.pObjectEntryCounts )
16752 && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags )
16753 && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor )
16754 && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor )
16755 && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor )
16756 && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor )
16757 && ( maxPipelineLayouts == rhs.maxPipelineLayouts );
16758 }
16759
16760 bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const
16761 {
16762 return !operator==( rhs );
16763 }
16764
16765 private:
16766 StructureType sType;
16767
16768 public:
16769 const void* pNext;
16770 uint32_t objectCount;
16771 const ObjectEntryTypeNVX* pObjectEntryTypes;
16772 const uint32_t* pObjectEntryCounts;
16773 const ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags;
16774 uint32_t maxUniformBuffersPerDescriptor;
16775 uint32_t maxStorageBuffersPerDescriptor;
16776 uint32_t maxStorageImagesPerDescriptor;
16777 uint32_t maxSampledImagesPerDescriptor;
16778 uint32_t maxPipelineLayouts;
16779 };
16780 static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" );
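  // Usage sketch (editorial note): an object table is created from parallel arrays
  // describing entry types, counts and allowed usages; the sizes below are placeholders.
  //
  //   vk::ObjectEntryTypeNVX entryTypes[]        = { vk::ObjectEntryTypeNVX::eVkObjectEntryPipeline };
  //   uint32_t entryCounts[]                     = { 16 };
  //   vk::ObjectEntryUsageFlagsNVX entryUsages[] = { vk::ObjectEntryUsageFlagBitsNVX::eGraphics };
  //   vk::ObjectTableCreateInfoNVX tableInfo = vk::ObjectTableCreateInfoNVX()
  //     .setObjectCount( 1 )
  //     .setPObjectEntryTypes( entryTypes )
  //     .setPObjectEntryCounts( entryCounts )
  //     .setPObjectEntryUsageFlags( entryUsages )
  //     .setMaxPipelineLayouts( 1 );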
16781
16782 struct ObjectTableEntryNVX
16783 {
16784 ObjectTableEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX() )
16785 : type( type_ )
16786 , flags( flags_ )
16787 {
16788 }
16789
16790 ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs )
16791 {
16792 memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
16793 }
16794
16795 ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs )
16796 {
16797 memcpy( this, &rhs, sizeof(ObjectTableEntryNVX) );
16798 return *this;
16799 }
16800
16801 ObjectTableEntryNVX& setType( ObjectEntryTypeNVX type_ )
16802 {
16803 type = type_;
16804 return *this;
16805 }
16806
16807 ObjectTableEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16808 {
16809 flags = flags_;
16810 return *this;
16811 }
16812
16813 operator const VkObjectTableEntryNVX&() const
16814 {
16815 return *reinterpret_cast<const VkObjectTableEntryNVX*>(this);
16816 }
16817
16818 bool operator==( ObjectTableEntryNVX const& rhs ) const
16819 {
16820 return ( type == rhs.type )
16821 && ( flags == rhs.flags );
16822 }
16823
16824 bool operator!=( ObjectTableEntryNVX const& rhs ) const
16825 {
16826 return !operator==( rhs );
16827 }
16828
16829 ObjectEntryTypeNVX type;
16830 ObjectEntryUsageFlagsNVX flags;
16831 };
16832 static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" );
16833
16834 struct ObjectTablePipelineEntryNVX
16835 {
16836 ObjectTablePipelineEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Pipeline pipeline_ = Pipeline() )
16837 : type( type_ )
16838 , flags( flags_ )
16839 , pipeline( pipeline_ )
16840 {
16841 }
16842
16843 ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs )
16844 {
16845 memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
16846 }
16847
16848 ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs )
16849 {
16850 memcpy( this, &rhs, sizeof(ObjectTablePipelineEntryNVX) );
16851 return *this;
16852 }
16853
16854 ObjectTablePipelineEntryNVX& setType( ObjectEntryTypeNVX type_ )
16855 {
16856 type = type_;
16857 return *this;
16858 }
16859
16860 ObjectTablePipelineEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16861 {
16862 flags = flags_;
16863 return *this;
16864 }
16865
16866 ObjectTablePipelineEntryNVX& setPipeline( Pipeline pipeline_ )
16867 {
16868 pipeline = pipeline_;
16869 return *this;
16870 }
16871
16872 operator const VkObjectTablePipelineEntryNVX&() const
16873 {
16874 return *reinterpret_cast<const VkObjectTablePipelineEntryNVX*>(this);
16875 }
16876
16877 bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const
16878 {
16879 return ( type == rhs.type )
16880 && ( flags == rhs.flags )
16881 && ( pipeline == rhs.pipeline );
16882 }
16883
16884 bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const
16885 {
16886 return !operator==( rhs );
16887 }
16888
16889 ObjectEntryTypeNVX type;
16890 ObjectEntryUsageFlagsNVX flags;
16891 Pipeline pipeline;
16892 };
16893 static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" );
16894
16895 struct ObjectTableDescriptorSetEntryNVX
16896 {
16897 ObjectTableDescriptorSetEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), DescriptorSet descriptorSet_ = DescriptorSet() )
16898 : type( type_ )
16899 , flags( flags_ )
16900 , pipelineLayout( pipelineLayout_ )
16901 , descriptorSet( descriptorSet_ )
16902 {
16903 }
16904
16905 ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs )
16906 {
16907 memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
16908 }
16909
16910 ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs )
16911 {
16912 memcpy( this, &rhs, sizeof(ObjectTableDescriptorSetEntryNVX) );
16913 return *this;
16914 }
16915
16916 ObjectTableDescriptorSetEntryNVX& setType( ObjectEntryTypeNVX type_ )
16917 {
16918 type = type_;
16919 return *this;
16920 }
16921
16922 ObjectTableDescriptorSetEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16923 {
16924 flags = flags_;
16925 return *this;
16926 }
16927
16928 ObjectTableDescriptorSetEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
16929 {
16930 pipelineLayout = pipelineLayout_;
16931 return *this;
16932 }
16933
16934 ObjectTableDescriptorSetEntryNVX& setDescriptorSet( DescriptorSet descriptorSet_ )
16935 {
16936 descriptorSet = descriptorSet_;
16937 return *this;
16938 }
16939
16940 operator const VkObjectTableDescriptorSetEntryNVX&() const
16941 {
16942 return *reinterpret_cast<const VkObjectTableDescriptorSetEntryNVX*>(this);
16943 }
16944
16945 bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const
16946 {
16947 return ( type == rhs.type )
16948 && ( flags == rhs.flags )
16949 && ( pipelineLayout == rhs.pipelineLayout )
16950 && ( descriptorSet == rhs.descriptorSet );
16951 }
16952
16953 bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const
16954 {
16955 return !operator==( rhs );
16956 }
16957
16958 ObjectEntryTypeNVX type;
16959 ObjectEntryUsageFlagsNVX flags;
16960 PipelineLayout pipelineLayout;
16961 DescriptorSet descriptorSet;
16962 };
16963 static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" );
16964
16965 struct ObjectTableVertexBufferEntryNVX
16966 {
16967 ObjectTableVertexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer() )
16968 : type( type_ )
16969 , flags( flags_ )
16970 , buffer( buffer_ )
16971 {
16972 }
16973
16974 ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs )
16975 {
16976 memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
16977 }
16978
16979 ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs )
16980 {
16981 memcpy( this, &rhs, sizeof(ObjectTableVertexBufferEntryNVX) );
16982 return *this;
16983 }
16984
16985 ObjectTableVertexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
16986 {
16987 type = type_;
16988 return *this;
16989 }
16990
16991 ObjectTableVertexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
16992 {
16993 flags = flags_;
16994 return *this;
16995 }
16996
16997 ObjectTableVertexBufferEntryNVX& setBuffer( Buffer buffer_ )
16998 {
16999 buffer = buffer_;
17000 return *this;
17001 }
17002
17003 operator const VkObjectTableVertexBufferEntryNVX&() const
17004 {
17005 return *reinterpret_cast<const VkObjectTableVertexBufferEntryNVX*>(this);
17006 }
17007
17008 bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const
17009 {
17010 return ( type == rhs.type )
17011 && ( flags == rhs.flags )
17012 && ( buffer == rhs.buffer );
17013 }
17014
17015 bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const
17016 {
17017 return !operator==( rhs );
17018 }
17019
17020 ObjectEntryTypeNVX type;
17021 ObjectEntryUsageFlagsNVX flags;
17022 Buffer buffer;
17023 };
17024 static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" );
17025
17026 struct ObjectTableIndexBufferEntryNVX
17027 {
17028    ObjectTableIndexBufferEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), Buffer buffer_ = Buffer(), IndexType indexType_ = IndexType::eUint16 )
17029      : type( type_ )
17030 , flags( flags_ )
17031 , buffer( buffer_ )
17032      , indexType( indexType_ )
17033    {
17034 }
17035
17036 ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs )
17037 {
17038 memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
17039 }
17040
17041 ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs )
17042 {
17043 memcpy( this, &rhs, sizeof(ObjectTableIndexBufferEntryNVX) );
17044 return *this;
17045 }
17046
17047 ObjectTableIndexBufferEntryNVX& setType( ObjectEntryTypeNVX type_ )
17048 {
17049 type = type_;
17050 return *this;
17051 }
17052
17053 ObjectTableIndexBufferEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
17054 {
17055 flags = flags_;
17056 return *this;
17057 }
17058
17059 ObjectTableIndexBufferEntryNVX& setBuffer( Buffer buffer_ )
17060 {
17061 buffer = buffer_;
17062 return *this;
17063 }
17064
17065    ObjectTableIndexBufferEntryNVX& setIndexType( IndexType indexType_ )
17066 {
17067 indexType = indexType_;
17068 return *this;
17069 }
17070
17071    operator const VkObjectTableIndexBufferEntryNVX&() const
17072 {
17073 return *reinterpret_cast<const VkObjectTableIndexBufferEntryNVX*>(this);
17074 }
17075
17076 bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const
17077 {
17078 return ( type == rhs.type )
17079 && ( flags == rhs.flags )
17080           && ( buffer == rhs.buffer )
17081 && ( indexType == rhs.indexType );
17082    }
17083
17084 bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const
17085 {
17086 return !operator==( rhs );
17087 }
17088
17089 ObjectEntryTypeNVX type;
17090 ObjectEntryUsageFlagsNVX flags;
17091 Buffer buffer;
17092    IndexType indexType;
17093  };
17094 static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" );
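  // Usage sketch (editorial note): registering an index buffer in an object table records
  // the buffer together with its index type; `indexBuffer` is a placeholder vk::Buffer.
  //
  //   vk::ObjectTableIndexBufferEntryNVX indexEntry = vk::ObjectTableIndexBufferEntryNVX()
  //     .setType( vk::ObjectEntryTypeNVX::eVkObjectEntryIndexBuffer )
  //     .setFlags( vk::ObjectEntryUsageFlagBitsNVX::eGraphics )
  //     .setBuffer( indexBuffer )
  //     .setIndexType( vk::IndexType::eUint32 );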
17095
17096 struct ObjectTablePushConstantEntryNVX
17097 {
17098 ObjectTablePushConstantEntryNVX( ObjectEntryTypeNVX type_ = ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet, ObjectEntryUsageFlagsNVX flags_ = ObjectEntryUsageFlagsNVX(), PipelineLayout pipelineLayout_ = PipelineLayout(), ShaderStageFlags stageFlags_ = ShaderStageFlags() )
17099 : type( type_ )
17100 , flags( flags_ )
17101 , pipelineLayout( pipelineLayout_ )
17102 , stageFlags( stageFlags_ )
17103 {
17104 }
17105
17106 ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs )
17107 {
17108 memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
17109 }
17110
17111 ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs )
17112 {
17113 memcpy( this, &rhs, sizeof(ObjectTablePushConstantEntryNVX) );
17114 return *this;
17115 }
17116
17117 ObjectTablePushConstantEntryNVX& setType( ObjectEntryTypeNVX type_ )
17118 {
17119 type = type_;
17120 return *this;
17121 }
17122
17123 ObjectTablePushConstantEntryNVX& setFlags( ObjectEntryUsageFlagsNVX flags_ )
17124 {
17125 flags = flags_;
17126 return *this;
17127 }
17128
17129 ObjectTablePushConstantEntryNVX& setPipelineLayout( PipelineLayout pipelineLayout_ )
17130 {
17131 pipelineLayout = pipelineLayout_;
17132 return *this;
17133 }
17134
17135 ObjectTablePushConstantEntryNVX& setStageFlags( ShaderStageFlags stageFlags_ )
17136 {
17137 stageFlags = stageFlags_;
17138 return *this;
17139 }
17140
17141 operator const VkObjectTablePushConstantEntryNVX&() const
17142 {
17143 return *reinterpret_cast<const VkObjectTablePushConstantEntryNVX*>(this);
17144 }
17145
17146 bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const
17147 {
17148 return ( type == rhs.type )
17149 && ( flags == rhs.flags )
17150 && ( pipelineLayout == rhs.pipelineLayout )
17151 && ( stageFlags == rhs.stageFlags );
17152 }
17153
17154 bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const
17155 {
17156 return !operator==( rhs );
17157 }
17158
17159 ObjectEntryTypeNVX type;
17160 ObjectEntryUsageFlagsNVX flags;
17161 PipelineLayout pipelineLayout;
17162 ShaderStageFlags stageFlags;
17163 };
17164 static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different size!" );
17165
17166  enum class SurfaceCounterFlagBitsEXT
17167 {
17168 eVblankExt = VK_SURFACE_COUNTER_VBLANK_EXT
17169 };
17170
17171 using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT, VkSurfaceCounterFlagsEXT>;
17172
17173 VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 )
17174 {
17175 return SurfaceCounterFlagsEXT( bit0 ) | bit1;
17176 }
17177
17178 VULKAN_HPP_INLINE SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits )
17179 {
17180 return ~( SurfaceCounterFlagsEXT( bits ) );
17181 }
17182
17183 template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
17184 {
17185 enum
17186 {
17187 allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblankExt)
17188 };
17189 };
17190
17191 struct SurfaceCapabilities2EXT
17192 {
17193 operator const VkSurfaceCapabilities2EXT&() const
17194 {
17195 return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>(this);
17196 }
17197
17198 bool operator==( SurfaceCapabilities2EXT const& rhs ) const
17199 {
17200 return ( sType == rhs.sType )
17201 && ( pNext == rhs.pNext )
17202 && ( minImageCount == rhs.minImageCount )
17203 && ( maxImageCount == rhs.maxImageCount )
17204 && ( currentExtent == rhs.currentExtent )
17205 && ( minImageExtent == rhs.minImageExtent )
17206 && ( maxImageExtent == rhs.maxImageExtent )
17207 && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
17208 && ( supportedTransforms == rhs.supportedTransforms )
17209 && ( currentTransform == rhs.currentTransform )
17210 && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
17211 && ( supportedUsageFlags == rhs.supportedUsageFlags )
17212 && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
17213 }
17214
17215 bool operator!=( SurfaceCapabilities2EXT const& rhs ) const
17216 {
17217 return !operator==( rhs );
17218 }
17219
17220 private:
17221 StructureType sType;
17222
17223 public:
17224 void* pNext;
17225 uint32_t minImageCount;
17226 uint32_t maxImageCount;
17227 Extent2D currentExtent;
17228 Extent2D minImageExtent;
17229 Extent2D maxImageExtent;
17230 uint32_t maxImageArrayLayers;
17231 SurfaceTransformFlagsKHR supportedTransforms;
17232 SurfaceTransformFlagBitsKHR currentTransform;
17233 CompositeAlphaFlagsKHR supportedCompositeAlpha;
17234 ImageUsageFlags supportedUsageFlags;
17235 SurfaceCounterFlagsEXT supportedSurfaceCounters;
17236 };
17237 static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
17238
17239 struct SwapchainCounterCreateInfoEXT
17240 {
17241 SwapchainCounterCreateInfoEXT( SurfaceCounterFlagsEXT surfaceCounters_ = SurfaceCounterFlagsEXT() )
17242 : sType( StructureType::eSwapchainCounterCreateInfoEXT )
17243 , pNext( nullptr )
17244 , surfaceCounters( surfaceCounters_ )
17245 {
17246 }
17247
17248 SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs )
17249 {
17250 memcpy( this, &rhs, sizeof(SwapchainCounterCreateInfoEXT) );
17251 }
17252
17253 SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs )
17254 {
17255 memcpy( this, &rhs, sizeof(SwapchainCounterCreateInfoEXT) );
17256 return *this;
17257 }
17258
17259    SwapchainCounterCreateInfoEXT& setPNext( const void* pNext_ )
17260 {
17261 pNext = pNext_;
17262 return *this;
17263 }
17264
17265 SwapchainCounterCreateInfoEXT& setSurfaceCounters( SurfaceCounterFlagsEXT surfaceCounters_ )
17266 {
17267 surfaceCounters = surfaceCounters_;
17268 return *this;
17269 }
17270
17271 operator const VkSwapchainCounterCreateInfoEXT&() const
17272 {
17273 return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>(this);
17274 }
17275
17276 bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const
17277 {
17278 return ( sType == rhs.sType )
17279 && ( pNext == rhs.pNext )
17280 && ( surfaceCounters == rhs.surfaceCounters );
17281 }
17282
17283 bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const
17284 {
17285 return !operator==( rhs );
17286 }
17287
17288 private:
17289 StructureType sType;
17290
17291 public:
17292 const void* pNext;
17293 SurfaceCounterFlagsEXT surfaceCounters;
17294 };
17295 static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
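  // Usage sketch (editorial note, not generated from the registry): SwapchainCounterCreateInfoEXT is
  // chained into SwapchainCreateInfoKHR::pNext to request surface counters for a swapchain; the
  // "device" handle and the rest of "swapchainCreateInfo" are assumed to be set up elsewhere.
  //
  //   vk::SwapchainCounterCreateInfoEXT counterInfo( vk::SurfaceCounterFlagBitsEXT::eVblankExt );
  //   swapchainCreateInfo.pNext = &counterInfo;
  //   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainCreateInfo );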
17296
17297 enum class DisplayPowerStateEXT
17298 {
17299 eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
17300 eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
17301 eOn = VK_DISPLAY_POWER_STATE_ON_EXT
17302 };
17303
17304 struct DisplayPowerInfoEXT
17305 {
17306 DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff )
17307 : sType( StructureType::eDisplayPowerInfoEXT )
17308 , pNext( nullptr )
17309 , powerState( powerState_ )
17310 {
17311 }
17312
17313 DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs )
17314 {
17315 memcpy( this, &rhs, sizeof(DisplayPowerInfoEXT) );
17316 }
17317
17318 DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs )
17319 {
17320 memcpy( this, &rhs, sizeof(DisplayPowerInfoEXT) );
17321 return *this;
17322 }
17323
17324 DisplayPowerInfoEXT& setPNext( const void* pNext_ )
17325 {
17326 pNext = pNext_;
17327 return *this;
17328 }
17329
17330 DisplayPowerInfoEXT& setPowerState( DisplayPowerStateEXT powerState_ )
17331 {
17332 powerState = powerState_;
17333 return *this;
17334 }
17335
17336 operator const VkDisplayPowerInfoEXT&() const
17337 {
17338 return *reinterpret_cast<const VkDisplayPowerInfoEXT*>(this);
17339 }
17340
17341 bool operator==( DisplayPowerInfoEXT const& rhs ) const
17342 {
17343 return ( sType == rhs.sType )
17344 && ( pNext == rhs.pNext )
17345 && ( powerState == rhs.powerState );
17346 }
17347
17348 bool operator!=( DisplayPowerInfoEXT const& rhs ) const
17349 {
17350 return !operator==( rhs );
17351 }
17352
17353 private:
17354 StructureType sType;
17355
17356 public:
17357 const void* pNext;
17358 DisplayPowerStateEXT powerState;
17359 };
17360 static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
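  // Usage sketch (editorial note, not generated from the registry): DisplayPowerInfoEXT is consumed by
  // the Device::displayPowerControlEXT wrapper of VK_EXT_display_control, assumed to be declared
  // elsewhere in this header; "device" and "display" are assumed to exist.
  //
  //   device.displayPowerControlEXT( display, vk::DisplayPowerInfoEXT( vk::DisplayPowerStateEXT::eSuspend ) );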
17361
17362 enum class DeviceEventTypeEXT
17363 {
17364 eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
17365 };
17366
17367 struct DeviceEventInfoEXT
17368 {
17369 DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug )
17370 : sType( StructureType::eDeviceEventInfoEXT )
17371 , pNext( nullptr )
17372 , deviceEvent( deviceEvent_ )
17373 {
17374 }
17375
17376 DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs )
17377 {
17378 memcpy( this, &rhs, sizeof(DeviceEventInfoEXT) );
17379 }
17380
17381 DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs )
17382 {
17383 memcpy( this, &rhs, sizeof(DeviceEventInfoEXT) );
17384 return *this;
17385 }
17386
17387 DeviceEventInfoEXT& setPNext( const void* pNext_ )
17388 {
17389 pNext = pNext_;
17390 return *this;
17391 }
17392
17393 DeviceEventInfoEXT& setDeviceEvent( DeviceEventTypeEXT deviceEvent_ )
17394 {
17395 deviceEvent = deviceEvent_;
17396 return *this;
17397 }
17398
17399 operator const VkDeviceEventInfoEXT&() const
17400 {
17401 return *reinterpret_cast<const VkDeviceEventInfoEXT*>(this);
17402 }
17403
17404 bool operator==( DeviceEventInfoEXT const& rhs ) const
17405 {
17406 return ( sType == rhs.sType )
17407 && ( pNext == rhs.pNext )
17408 && ( deviceEvent == rhs.deviceEvent );
17409 }
17410
17411 bool operator!=( DeviceEventInfoEXT const& rhs ) const
17412 {
17413 return !operator==( rhs );
17414 }
17415
17416 private:
17417 StructureType sType;
17418
17419 public:
17420 const void* pNext;
17421 DeviceEventTypeEXT deviceEvent;
17422 };
17423 static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
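  // Usage sketch (editorial note, not generated from the registry): DeviceEventInfoEXT is passed to the
  // Device::registerEventEXT wrapper of VK_EXT_display_control (assumed to be declared elsewhere in this
  // header), which returns a fence that signals when the event occurs.
  //
  //   vk::Fence hotplugFence = device.registerEventEXT( vk::DeviceEventInfoEXT( vk::DeviceEventTypeEXT::eDisplayHotplug ) );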
17424
17425 enum class DisplayEventTypeEXT
17426 {
17427 eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
17428 };
17429
17430 struct DisplayEventInfoEXT
17431 {
17432 DisplayEventInfoEXT( DisplayEventTypeEXT displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut )
17433 : sType( StructureType::eDisplayEventInfoEXT )
17434 , pNext( nullptr )
17435 , displayEvent( displayEvent_ )
17436 {
17437 }
17438
17439 DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs )
17440 {
17441 memcpy( this, &rhs, sizeof(DisplayEventInfoEXT) );
17442 }
17443
17444 DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs )
17445 {
17446 memcpy( this, &rhs, sizeof(DisplayEventInfoEXT) );
17447 return *this;
17448 }
17449
17450 DisplayEventInfoEXT& setPNext( const void* pNext_ )
17451 {
17452 pNext = pNext_;
17453 return *this;
17454 }
17455
17456 DisplayEventInfoEXT& setDisplayEvent( DisplayEventTypeEXT displayEvent_ )
17457 {
17458 displayEvent = displayEvent_;
17459 return *this;
17460 }
17461
17462 operator const VkDisplayEventInfoEXT&() const
17463 {
17464 return *reinterpret_cast<const VkDisplayEventInfoEXT*>(this);
17465 }
17466
17467 bool operator==( DisplayEventInfoEXT const& rhs ) const
17468 {
17469 return ( sType == rhs.sType )
17470 && ( pNext == rhs.pNext )
17471 && ( displayEvent == rhs.displayEvent );
17472 }
17473
17474 bool operator!=( DisplayEventInfoEXT const& rhs ) const
17475 {
17476 return !operator==( rhs );
17477 }
17478
17479 private:
17480 StructureType sType;
17481
17482 public:
17483 const void* pNext;
17484 DisplayEventTypeEXT displayEvent;
17485 };
17486 static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
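  // Usage sketch (editorial note, not generated from the registry): DisplayEventInfoEXT is passed to the
  // Device::registerDisplayEventEXT wrapper of VK_EXT_display_control (assumed to be declared elsewhere
  // in this header); "display" is a DisplayKHR obtained from the physical device.
  //
  //   vk::Fence firstPixelFence = device.registerDisplayEventEXT( display, vk::DisplayEventInfoEXT( vk::DisplayEventTypeEXT::eFirstPixelOut ) );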
17487
17488 Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties );
17489#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17490 template <typename Allocator = std::allocator<LayerProperties>>
17491 typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties();
17492#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17493
17494 VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties )
17495 {
17496 return static_cast<Result>( vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
17497 }
17498#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17499 template <typename Allocator>
17500 VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties()
17501 {
17502 std::vector<LayerProperties,Allocator> properties;
17503 uint32_t propertyCount;
17504 Result result;
17505 do
17506 {
17507 result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
17508 if ( ( result == Result::eSuccess ) && propertyCount )
17509 {
17510 properties.resize( propertyCount );
17511 result = static_cast<Result>( vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
17512 }
17513 } while ( result == Result::eIncomplete );
17514 assert( propertyCount <= properties.size() );
17515 properties.resize( propertyCount );
17516 return createResultValue( result, properties, "vk::enumerateInstanceLayerProperties" );
17517 }
17518#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
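  // Usage sketch (editorial note, not generated from the registry): in enhanced mode with exceptions
  // enabled, the allocator-templated overload hides the VK_INCOMPLETE retry loop and returns the vector
  // directly.
  //
  //   std::vector<vk::LayerProperties> layers = vk::enumerateInstanceLayerProperties();
  //   for ( auto const& layer : layers ) { /* inspect layer.layerName */ }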
17519
17520 Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties );
17521#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17522 template <typename Allocator = std::allocator<ExtensionProperties>>
17523 typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr );
17524#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17525
17526 VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties )
17527 {
17528 return static_cast<Result>( vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
17529 }
17530#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17531 template <typename Allocator>
17532 VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName )
17533 {
17534 std::vector<ExtensionProperties,Allocator> properties;
17535 uint32_t propertyCount;
17536 Result result;
17537 do
17538 {
17539 result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
17540 if ( ( result == Result::eSuccess ) && propertyCount )
17541 {
17542 properties.resize( propertyCount );
17543 result = static_cast<Result>( vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
17544 }
17545 } while ( result == Result::eIncomplete );
17546 assert( propertyCount <= properties.size() );
17547 properties.resize( propertyCount );
17548 return createResultValue( result, properties, "vk::enumerateInstanceExtensionProperties" );
17549 }
17550#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
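  // Usage sketch (editorial note, not generated from the registry): the layer name is optional; pass
  // nothing (the default nullptr) to query instance-level extensions, or a layer name to query that layer.
  // The validation layer name below is an assumption for illustration only.
  //
  //   std::vector<vk::ExtensionProperties> instanceExtensions = vk::enumerateInstanceExtensionProperties();
  //   std::string layerName = "VK_LAYER_LUNARG_standard_validation";
  //   std::vector<vk::ExtensionProperties> layerExtensions = vk::enumerateInstanceExtensionProperties( layerName );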
17551
17552 // forward declarations
17553 struct CmdProcessCommandsInfoNVX;
17554
17555 class CommandBuffer
17556 {
17557 public:
17558 CommandBuffer()
17559 : m_commandBuffer(VK_NULL_HANDLE)
17560 {}
17561
17562 CommandBuffer( std::nullptr_t )
17563 : m_commandBuffer(VK_NULL_HANDLE)
17564 {}
17565
17566#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
17567 CommandBuffer(VkCommandBuffer commandBuffer)
17568 : m_commandBuffer(commandBuffer)
17569 {}
17570
17571 CommandBuffer& operator=(VkCommandBuffer commandBuffer)
17572 {
17573 m_commandBuffer = commandBuffer;
17574 return *this;
17575 }
17576#endif
17577
17578 CommandBuffer& operator=( std::nullptr_t )
17579 {
17580 m_commandBuffer = VK_NULL_HANDLE;
17581 return *this;
17582 }
17583
17584 bool operator==(CommandBuffer const &rhs) const
17585 {
17586 return m_commandBuffer == rhs.m_commandBuffer;
17587 }
17588
17589 bool operator!=(CommandBuffer const &rhs) const
17590 {
17591 return m_commandBuffer != rhs.m_commandBuffer;
17592 }
17593
17594 bool operator<(CommandBuffer const &rhs) const
17595 {
17596 return m_commandBuffer < rhs.m_commandBuffer;
17597 }
17598
17599 Result begin( const CommandBufferBeginInfo* pBeginInfo ) const;
17600#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17601 ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo ) const;
17602#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17603
17604#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17605 Result end() const;
17606#else
17607 ResultValueType<void>::type end() const;
17608#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17609
17610#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17611 Result reset( CommandBufferResetFlags flags ) const;
17612#else
17613 ResultValueType<void>::type reset( CommandBufferResetFlags flags ) const;
17614#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17615
17616 void bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const;
17617
17618 void setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const;
17619#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17620 void setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const;
17621#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17622
17623 void setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const;
17624#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17625 void setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const;
17626#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17627
17628 void setLineWidth( float lineWidth ) const;
17629
17630 void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const;
17631
17632 void setBlendConstants( const float blendConstants[4] ) const;
17633
17634 void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const;
17635
17636 void setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const;
17637
17638 void setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const;
17639
17640 void setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const;
17641
17642 void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const;
17643#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17644 void bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const;
17645#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17646
17647 void bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const;
17648
17649 void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const;
17650#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17651 void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const;
17652#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17653
17654 void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const;
17655
17656 void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const;
17657
17658 void drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const;
17659
17660 void drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const;
17661
17662 void dispatch( uint32_t x, uint32_t y, uint32_t z ) const;
17663
17664 void dispatchIndirect( Buffer buffer, DeviceSize offset ) const;
17665
17666 void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const;
17667#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17668 void copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const;
17669#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17670
17671 void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const;
17672#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17673 void copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const;
17674#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17675
17676 void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const;
17677#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17678 void blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const;
17679#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17680
17681 void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const;
17682#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17683 void copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const;
17684#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17685
17686 void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const;
17687#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17688 void copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const;
17689#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17690
17691 void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const;
17692#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17693 template <typename T>
17694 void updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const;
17695#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17696
17697 void fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const;
17698
17699 void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const;
17700#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17701 void clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const;
17702#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17703
17704 void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const;
17705#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17706 void clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const;
17707#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17708
17709 void clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const;
17710#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17711 void clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const;
17712#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17713
17714 void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const;
17715#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17716 void resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const;
17717#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17718
17719 void setEvent( Event event, PipelineStageFlags stageMask ) const;
17720
17721 void resetEvent( Event event, PipelineStageFlags stageMask ) const;
17722
17723 void waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const;
17724#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17725 void waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const;
17726#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17727
17728 void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const;
17729#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17730 void pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const;
17731#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17732
17733 void beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const;
17734
17735 void endQuery( QueryPool queryPool, uint32_t query ) const;
17736
17737 void resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const;
17738
17739 void writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const;
17740
17741 void copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const;
17742
17743 void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const;
17744#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17745 template <typename T>
17746 void pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const;
17747#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17748
17749 void beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const;
17750#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17751 void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const;
17752#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17753
17754 void nextSubpass( SubpassContents contents ) const;
17755
17756 void endRenderPass() const;
17757
17758 void executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const;
17759#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17760 void executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const;
17761#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17762
17763 void debugMarkerBeginEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const;
17764#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17765 DebugMarkerMarkerInfoEXT debugMarkerBeginEXT() const;
17766#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17767
17768 void debugMarkerEndEXT() const;
17769
17770 void debugMarkerInsertEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const;
17771#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17772 DebugMarkerMarkerInfoEXT debugMarkerInsertEXT() const;
17773#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17774
17775 void drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const;
17776
17777 void drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const;
17778
17779 void processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const;
17780#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17781 void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo ) const;
17782#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17783
17784 void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const;
17785#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17786 void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo ) const;
17787#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17788
17789#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
17790 explicit
17791#endif
17792 operator VkCommandBuffer() const
17793 {
17794 return m_commandBuffer;
17795 }
17796
17797 explicit operator bool() const
17798 {
17799 return m_commandBuffer != VK_NULL_HANDLE;
17800 }
17801
17802 bool operator!() const
17803 {
17804 return m_commandBuffer == VK_NULL_HANDLE;
17805 }
17806
17807 private:
17808 VkCommandBuffer m_commandBuffer;
17809 };
17810 static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
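  // Usage sketch (editorial note, not generated from the registry): a minimal recording sequence using
  // the enhanced-mode overloads declared above; "commandBuffer", "srcBuffer", "dstBuffer" and "size"
  // are assumed to have been created elsewhere.
  //
  //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  //   commandBuffer.begin( beginInfo );
  //   vk::BufferCopy region( 0, 0, size );
  //   commandBuffer.copyBuffer( srcBuffer, dstBuffer, region );
  //   commandBuffer.end();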
17811
17812 VULKAN_HPP_INLINE Result CommandBuffer::begin( const CommandBufferBeginInfo* pBeginInfo ) const
17813 {
17814 return static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( pBeginInfo ) ) );
17815 }
17816#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17817 VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo ) const
17818 {
17819 Result result = static_cast<Result>( vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo*>( &beginInfo ) ) );
17820 return createResultValue( result, "vk::CommandBuffer::begin" );
17821 }
17822#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17823
17824#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17825 VULKAN_HPP_INLINE Result CommandBuffer::end() const
17826 {
17827 return static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
17828 }
17829#else
17830 VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::end() const
17831 {
17832 Result result = static_cast<Result>( vkEndCommandBuffer( m_commandBuffer ) );
17833 return createResultValue( result, "vk::CommandBuffer::end" );
17834 }
17835#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17836
17837#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
17838 VULKAN_HPP_INLINE Result CommandBuffer::reset( CommandBufferResetFlags flags ) const
17839 {
17840 return static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
17841 }
17842#else
17843 VULKAN_HPP_INLINE ResultValueType<void>::type CommandBuffer::reset( CommandBufferResetFlags flags ) const
17844 {
17845 Result result = static_cast<Result>( vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
17846 return createResultValue( result, "vk::CommandBuffer::reset" );
17847 }
17848#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17849
17850 VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( PipelineBindPoint pipelineBindPoint, Pipeline pipeline ) const
17851 {
17852 vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
17853 }
17854
17855 VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const Viewport* pViewports ) const
17856 {
17857 vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
17858 }
17859#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17860 VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const Viewport> viewports ) const
17861 {
17862 vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
17863 }
17864#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17865
17866 VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const Rect2D* pScissors ) const
17867 {
17868 vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
17869 }
17870#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17871 VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const Rect2D> scissors ) const
17872 {
17873 vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
17874 }
17875#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17876
17877 VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const
17878 {
17879 vkCmdSetLineWidth( m_commandBuffer, lineWidth );
17880 }
17881
17882 VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const
17883 {
17884 vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
17885 }
17886
17887 VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const
17888 {
17889 vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
17890 }
17891
17892 VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const
17893 {
17894 vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
17895 }
17896
17897 VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( StencilFaceFlags faceMask, uint32_t compareMask ) const
17898 {
17899 vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
17900 }
17901
17902 VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( StencilFaceFlags faceMask, uint32_t writeMask ) const
17903 {
17904 vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
17905 }
17906
17907 VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( StencilFaceFlags faceMask, uint32_t reference ) const
17908 {
17909 vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
17910 }
17911
17912 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const
17913 {
17914 vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
17915 }
17916#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17917 VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( PipelineBindPoint pipelineBindPoint, PipelineLayout layout, uint32_t firstSet, ArrayProxy<const DescriptorSet> descriptorSets, ArrayProxy<const uint32_t> dynamicOffsets ) const
17918 {
17919 vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() );
17920 }
17921#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17922
17923 VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( Buffer buffer, DeviceSize offset, IndexType indexType ) const
17924 {
17925 vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkIndexType>( indexType ) );
17926 }
17927
17928 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const Buffer* pBuffers, const DeviceSize* pOffsets ) const
17929 {
17930 vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer*>( pBuffers ), pOffsets );
17931 }
17932#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17933 VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const Buffer> buffers, ArrayProxy<const DeviceSize> offsets ) const
17934 {
17935#ifdef VULKAN_HPP_NO_EXCEPTIONS
17936 assert( buffers.size() == offsets.size() );
17937#else
17938 if ( buffers.size() != offsets.size() )
17939 {
17940 throw std::logic_error( "vk::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
17941 }
17942#endif // VULKAN_HPP_NO_EXCEPTIONS
17943 vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast<const VkBuffer*>( buffers.data() ), offsets.data() );
17944 }
17945#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
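  // Usage sketch (editorial note, not generated from the registry): ArrayProxy also accepts a single
  // element, so one vertex buffer can be bound without building containers; "vertexBuffer" is assumed
  // to exist. As shown above, the overload asserts or throws std::logic_error when buffers and offsets
  // differ in size.
  //
  //   vk::DeviceSize offset = 0;
  //   commandBuffer.bindVertexBuffers( 0, vertexBuffer, offset );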
17946
17947 VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const
17948 {
17949 vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
17950 }
17951
17952 VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const
17953 {
17954 vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
17955 }
17956
17957 VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17958 {
17959 vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17960 }
17961
17962 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( Buffer buffer, DeviceSize offset, uint32_t drawCount, uint32_t stride ) const
17963 {
17964 vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, drawCount, stride );
17965 }
17966
17967 VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t x, uint32_t y, uint32_t z ) const
17968 {
17969 vkCmdDispatch( m_commandBuffer, x, y, z );
17970 }
17971
17972 VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( Buffer buffer, DeviceSize offset ) const
17973 {
17974 vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset );
17975 }
17976
17977 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, uint32_t regionCount, const BufferCopy* pRegions ) const
17978 {
17979 vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy*>( pRegions ) );
17980 }
17981#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17982 VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( Buffer srcBuffer, Buffer dstBuffer, ArrayProxy<const BufferCopy> regions ) const
17983 {
17984 vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferCopy*>( regions.data() ) );
17985 }
17986#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17987
17988 VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageCopy* pRegions ) const
17989 {
17990 vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy*>( pRegions ) );
17991 }
17992#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
17993 VULKAN_HPP_INLINE void CommandBuffer::copyImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageCopy> regions ) const
17994 {
17995 vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageCopy*>( regions.data() ) );
17996 }
17997#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
17998
17999 VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageBlit* pRegions, Filter filter ) const
18000 {
18001 vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit*>( pRegions ), static_cast<VkFilter>( filter ) );
18002 }
18003#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18004 VULKAN_HPP_INLINE void CommandBuffer::blitImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageBlit> regions, Filter filter ) const
18005 {
18006 vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageBlit*>( regions.data() ), static_cast<VkFilter>( filter ) );
18007 }
18008#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18009
18010 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const BufferImageCopy* pRegions ) const
18011 {
18012 vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
18013 }
18014#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18015 VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( Buffer srcBuffer, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const BufferImageCopy> regions ) const
18016 {
18017 vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
18018 }
18019#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18020
18021 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, uint32_t regionCount, const BufferImageCopy* pRegions ) const
18022 {
18023 vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy*>( pRegions ) );
18024 }
18025#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18026 VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( Image srcImage, ImageLayout srcImageLayout, Buffer dstBuffer, ArrayProxy<const BufferImageCopy> regions ) const
18027 {
18028 vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size() , reinterpret_cast<const VkBufferImageCopy*>( regions.data() ) );
18029 }
18030#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18031
18032 VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize dataSize, const void* pData ) const
18033 {
18034 vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, dataSize, pData );
18035 }
18036#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18037 template <typename T>
18038 VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( Buffer dstBuffer, DeviceSize dstOffset, ArrayProxy<const T> data ) const
18039 {
18040 vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
18041 }
18042#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18043
18044 VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( Buffer dstBuffer, DeviceSize dstOffset, DeviceSize size, uint32_t data ) const
18045 {
18046 vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), dstOffset, size, data );
18047 }
18048
18049 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue* pColor, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
18050 {
18051 vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
18052 }
18053#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18054 VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( Image image, ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const ImageSubresourceRange> ranges ) const
18055 {
18056 vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue*>( &color ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
18057 }
18058#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18059
18060 VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const ImageSubresourceRange* pRanges ) const
18061 {
18062 vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange*>( pRanges ) );
18063 }
18064#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18065 VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( Image image, ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const ImageSubresourceRange> ranges ) const
18066 {
18067 vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue*>( &depthStencil ), ranges.size() , reinterpret_cast<const VkImageSubresourceRange*>( ranges.data() ) );
18068 }
18069#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18070
18071 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const ClearAttachment* pAttachments, uint32_t rectCount, const ClearRect* pRects ) const
18072 {
18073 vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment*>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect*>( pRects ) );
18074 }
18075#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18076 VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const ClearAttachment> attachments, ArrayProxy<const ClearRect> rects ) const
18077 {
18078 vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast<const VkClearAttachment*>( attachments.data() ), rects.size() , reinterpret_cast<const VkClearRect*>( rects.data() ) );
18079 }
18080#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18081
18082 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, uint32_t regionCount, const ImageResolve* pRegions ) const
18083 {
18084 vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
18085 }
18086#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18087 VULKAN_HPP_INLINE void CommandBuffer::resolveImage( Image srcImage, ImageLayout srcImageLayout, Image dstImage, ImageLayout dstImageLayout, ArrayProxy<const ImageResolve> regions ) const
18088 {
18089 vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
18090 }
18091#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18092
18093 VULKAN_HPP_INLINE void CommandBuffer::setEvent( Event event, PipelineStageFlags stageMask ) const
18094 {
18095 vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
18096 }
18097
18098 VULKAN_HPP_INLINE void CommandBuffer::resetEvent( Event event, PipelineStageFlags stageMask ) const
18099 {
18100 vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
18101 }
18102
18103 VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const Event* pEvents, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
18104 {
18105 vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
18106 }
18107#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18108 VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const Event> events, PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
18109 {
18110 vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
18111 }
18112#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18113
18114 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const ImageMemoryBarrier* pImageMemoryBarriers ) const
18115 {
18116 vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
18117 }
18118#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18119 VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( PipelineStageFlags srcStageMask, PipelineStageFlags dstStageMask, DependencyFlags dependencyFlags, ArrayProxy<const MemoryBarrier> memoryBarriers, ArrayProxy<const BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const ImageMemoryBarrier> imageMemoryBarriers ) const
18120 {
18121 vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
18122 }
18123#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18124
18125 VULKAN_HPP_INLINE void CommandBuffer::beginQuery( QueryPool queryPool, uint32_t query, QueryControlFlags flags ) const
18126 {
18127 vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
18128 }
18129
18130 VULKAN_HPP_INLINE void CommandBuffer::endQuery( QueryPool queryPool, uint32_t query ) const
18131 {
18132 vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
18133 }
18134
18135 VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const
18136 {
18137 vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
18138 }
18139
18140 VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( PipelineStageFlagBits pipelineStage, QueryPool queryPool, uint32_t query ) const
18141 {
18142 vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
18143 }
18144
18145 VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Buffer dstBuffer, DeviceSize dstOffset, DeviceSize stride, QueryResultFlags flags ) const
18146 {
18147 vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), dstOffset, stride, static_cast<VkQueryResultFlags>( flags ) );
18148 }
18149
18150 VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const
18151 {
18152 vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
18153 }
18154#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18155 template <typename T>
18156 VULKAN_HPP_INLINE void CommandBuffer::pushConstants( PipelineLayout layout, ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values ) const
18157 {
18158 vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
18159 }
18160#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
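  // Usage sketch (editorial note, not generated from the registry): the explicit template argument fixes
  // the element type of the ArrayProxy, so the byte size passed to vkCmdPushConstants is
  // values.size() * sizeof( T ); "pipelineLayout" is assumed to exist.
  //
  //   std::array<float, 4> color = { { 1.0f, 0.0f, 0.0f, 1.0f } };
  //   commandBuffer.pushConstants<float>( pipelineLayout, vk::ShaderStageFlagBits::eFragment, 0, color );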
18161
18162 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo* pRenderPassBegin, SubpassContents contents ) const
18163 {
18164 vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
18165 }
18166#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18167 VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, SubpassContents contents ) const
18168 {
18169 vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo*>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
18170 }
18171#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18172
18173 VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( SubpassContents contents ) const
18174 {
18175 vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
18176 }
18177
18178 VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const
18179 {
18180 vkCmdEndRenderPass( m_commandBuffer );
18181 }
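  // Illustrative sketch (editor's comment): a minimal render-pass scope built from the calls
  // above. 'renderPass', 'framebuffer', 'extent' and 'clearValue' are assumed to exist.
  //
  //   vk::RenderPassBeginInfo beginInfo;
  //   beginInfo.setRenderPass( renderPass )
  //            .setFramebuffer( framebuffer )
  //            .setRenderArea( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
  //            .setClearValueCount( 1 )
  //            .setPClearValues( &clearValue );
  //   commandBuffer.beginRenderPass( beginInfo, vk::SubpassContents::eInline );
  //   // ... record draws ...
  //   commandBuffer.endRenderPass();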
18182
18183 VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
18184 {
18185 vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
18186 }
18187#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18188 VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const CommandBuffer> commandBuffers ) const
18189 {
18190 vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
18191 }
18192#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
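  // Illustrative sketch (editor's comment): replaying secondary command buffers from a primary
  // one; a std::vector<vk::CommandBuffer> converts implicitly to the ArrayProxy parameter above.
  //
  //   primaryCommandBuffer.executeCommands( secondaryCommandBuffers );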
18193
18194 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
18195 {
18196 vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
18197 }
18198#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18199 VULKAN_HPP_INLINE DebugMarkerMarkerInfoEXT CommandBuffer::debugMarkerBeginEXT() const
18200 {
18201 DebugMarkerMarkerInfoEXT markerInfo;
18202 vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
18203 return markerInfo;
18204 }
18205#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18206
18207 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const
18208 {
18209 vkCmdDebugMarkerEndEXT( m_commandBuffer );
18210 }
18211
18212 VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( DebugMarkerMarkerInfoEXT* pMarkerInfo ) const
18213 {
18214 vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( pMarkerInfo ) );
18215 }
18216#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18217 VULKAN_HPP_INLINE DebugMarkerMarkerInfoEXT CommandBuffer::debugMarkerInsertEXT() const
18218 {
18219 DebugMarkerMarkerInfoEXT markerInfo;
18220 vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( &markerInfo ) );
18221 return markerInfo;
18222 }
18223#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
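  // Illustrative sketch (editor's comment): the debug-marker entry points require the
  // VK_EXT_debug_marker device extension to be enabled on the device.
  //
  //   vk::DebugMarkerMarkerInfoEXT marker;
  //   marker.setPMarkerName( "shadow pass" );
  //   commandBuffer.debugMarkerBeginEXT( &marker );
  //   // ... marked work ...
  //   commandBuffer.debugMarkerEndEXT();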
18224
18225 VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
18226 {
18227 vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
18228 }
18229
18230 VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( Buffer buffer, DeviceSize offset, Buffer countBuffer, DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const
18231 {
18232 vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), offset, static_cast<VkBuffer>( countBuffer ), countBufferOffset, maxDrawCount, stride );
18233 }
18234
18235 VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const
18236 {
18237 vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
18238 }
18239#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18240 VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo ) const
18241 {
18242 vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
18243 }
18244#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18245
18246 VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const
18247 {
18248 vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
18249 }
18250#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18251 VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo ) const
18252 {
18253 vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
18254 }
18255#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018256 struct SubmitInfo
18257 {
18258 SubmitInfo( uint32_t waitSemaphoreCount_ = 0, const Semaphore* pWaitSemaphores_ = nullptr, const PipelineStageFlags* pWaitDstStageMask_ = nullptr, uint32_t commandBufferCount_ = 0, const CommandBuffer* pCommandBuffers_ = nullptr, uint32_t signalSemaphoreCount_ = 0, const Semaphore* pSignalSemaphores_ = nullptr )
18259 : sType( StructureType::eSubmitInfo )
18260 , pNext( nullptr )
18261 , waitSemaphoreCount( waitSemaphoreCount_ )
18262 , pWaitSemaphores( pWaitSemaphores_ )
18263 , pWaitDstStageMask( pWaitDstStageMask_ )
18264 , commandBufferCount( commandBufferCount_ )
18265 , pCommandBuffers( pCommandBuffers_ )
18266 , signalSemaphoreCount( signalSemaphoreCount_ )
18267 , pSignalSemaphores( pSignalSemaphores_ )
18268 {
18269 }
18270
18271 SubmitInfo( VkSubmitInfo const & rhs )
18272 {
18273 memcpy( this, &rhs, sizeof(SubmitInfo) );
18274 }
18275
18276 SubmitInfo& operator=( VkSubmitInfo const & rhs )
18277 {
18278 memcpy( this, &rhs, sizeof(SubmitInfo) );
18279 return *this;
18280 }
18281
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018282 SubmitInfo& setPNext( const void* pNext_ )
18283 {
18284 pNext = pNext_;
18285 return *this;
18286 }
18287
18288 SubmitInfo& setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ )
18289 {
18290 waitSemaphoreCount = waitSemaphoreCount_;
18291 return *this;
18292 }
18293
18294 SubmitInfo& setPWaitSemaphores( const Semaphore* pWaitSemaphores_ )
18295 {
18296 pWaitSemaphores = pWaitSemaphores_;
18297 return *this;
18298 }
18299
18300 SubmitInfo& setPWaitDstStageMask( const PipelineStageFlags* pWaitDstStageMask_ )
18301 {
18302 pWaitDstStageMask = pWaitDstStageMask_;
18303 return *this;
18304 }
18305
18306 SubmitInfo& setCommandBufferCount( uint32_t commandBufferCount_ )
18307 {
18308 commandBufferCount = commandBufferCount_;
18309 return *this;
18310 }
18311
18312 SubmitInfo& setPCommandBuffers( const CommandBuffer* pCommandBuffers_ )
18313 {
18314 pCommandBuffers = pCommandBuffers_;
18315 return *this;
18316 }
18317
18318 SubmitInfo& setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ )
18319 {
18320 signalSemaphoreCount = signalSemaphoreCount_;
18321 return *this;
18322 }
18323
18324 SubmitInfo& setPSignalSemaphores( const Semaphore* pSignalSemaphores_ )
18325 {
18326 pSignalSemaphores = pSignalSemaphores_;
18327 return *this;
18328 }
18329
18330 operator const VkSubmitInfo&() const
18331 {
18332 return *reinterpret_cast<const VkSubmitInfo*>(this);
18333 }
18334
18335 bool operator==( SubmitInfo const& rhs ) const
18336 {
18337 return ( sType == rhs.sType )
18338 && ( pNext == rhs.pNext )
18339 && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
18340 && ( pWaitSemaphores == rhs.pWaitSemaphores )
18341 && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
18342 && ( commandBufferCount == rhs.commandBufferCount )
18343 && ( pCommandBuffers == rhs.pCommandBuffers )
18344 && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
18345 && ( pSignalSemaphores == rhs.pSignalSemaphores );
18346 }
18347
18348 bool operator!=( SubmitInfo const& rhs ) const
18349 {
18350 return !operator==( rhs );
18351 }
18352
18353 private:
18354 StructureType sType;
18355
18356 public:
18357 const void* pNext;
18358 uint32_t waitSemaphoreCount;
18359 const Semaphore* pWaitSemaphores;
18360 const PipelineStageFlags* pWaitDstStageMask;
18361 uint32_t commandBufferCount;
18362 const CommandBuffer* pCommandBuffers;
18363 uint32_t signalSemaphoreCount;
18364 const Semaphore* pSignalSemaphores;
18365 };
18366 static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
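  // Illustrative sketch (editor's comment): filling in a SubmitInfo for one command buffer that
  // waits on an acquire semaphore and signals a render-finished semaphore; all handles are
  // assumed to have been created elsewhere.
  //
  //   vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
  //   vk::SubmitInfo submitInfo( 1, &acquireSemaphore, &waitStage,
  //                              1, &commandBuffer,
  //                              1, &renderFinishedSemaphore );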
18367
18368 class Queue
18369 {
18370 public:
18371 Queue()
18372 : m_queue(VK_NULL_HANDLE)
18373 {}
18374
Mark Lobodzinskicd306ab2017-02-14 16:09:03 -070018375 Queue( std::nullptr_t )
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018376 : m_queue(VK_NULL_HANDLE)
18377 {}
18378
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018379#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
18380 Queue(VkQueue queue)
18381 : m_queue(queue)
18382 {}
18383
18384 Queue& operator=(VkQueue queue)
18385 {
18386 m_queue = queue;
18387 return *this;
18388 }
18389#endif
18390
Mark Lobodzinskicd306ab2017-02-14 16:09:03 -070018391 Queue& operator=( std::nullptr_t )
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018392 {
18393 m_queue = VK_NULL_HANDLE;
18394 return *this;
18395 }
18396
Lenny Komowebf33162016-08-26 14:10:08 -060018397 bool operator==(Queue const &rhs) const
18398 {
18399 return m_queue == rhs.m_queue;
18400 }
18401
18402 bool operator!=(Queue const &rhs) const
18403 {
18404 return m_queue != rhs.m_queue;
18405 }
18406
18407 bool operator<(Queue const &rhs) const
18408 {
18409 return m_queue < rhs.m_queue;
18410 }
18411
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018412 Result submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018413#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018414 ResultValueType<void>::type submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018415#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18416
18417#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018418 Result waitIdle() const;
18419#else
18420 ResultValueType<void>::type waitIdle() const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018421#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18422
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018423 Result bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018424#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018425 ResultValueType<void>::type bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018426#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18427
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018428 Result presentKHR( const PresentInfoKHR* pPresentInfo ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018429#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018430 Result presentKHR( const PresentInfoKHR & presentInfo ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018431#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18432
18433#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
18434 explicit
18435#endif
18436 operator VkQueue() const
18437 {
18438 return m_queue;
18439 }
18440
18441 explicit operator bool() const
18442 {
18443 return m_queue != VK_NULL_HANDLE;
18444 }
18445
18446 bool operator!() const
18447 {
18448 return m_queue == VK_NULL_HANDLE;
18449 }
18450
18451 private:
18452 VkQueue m_queue;
18453 };
18454 static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
18455
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018456 VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const SubmitInfo* pSubmits, Fence fence ) const
18457 {
18458 return static_cast<Result>( vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
18459 }
18460#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18461 VULKAN_HPP_INLINE ResultValueType<void>::type Queue::submit( ArrayProxy<const SubmitInfo> submits, Fence fence ) const
18462 {
18463 Result result = static_cast<Result>( vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
18464 return createResultValue( result, "vk::Queue::submit" );
18465 }
18466#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18467
18468#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
18469 VULKAN_HPP_INLINE Result Queue::waitIdle() const
18470 {
18471 return static_cast<Result>( vkQueueWaitIdle( m_queue ) );
18472 }
18473#else
18474 VULKAN_HPP_INLINE ResultValueType<void>::type Queue::waitIdle() const
18475 {
18476 Result result = static_cast<Result>( vkQueueWaitIdle( m_queue ) );
18477 return createResultValue( result, "vk::Queue::waitIdle" );
18478 }
18479#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18480
18481 VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const BindSparseInfo* pBindInfo, Fence fence ) const
18482 {
18483 return static_cast<Result>( vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
18484 }
18485#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18486 VULKAN_HPP_INLINE ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const BindSparseInfo> bindInfo, Fence fence ) const
18487 {
18488 Result result = static_cast<Result>( vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
18489 return createResultValue( result, "vk::Queue::bindSparse" );
18490 }
18491#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18492
18493 VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR* pPresentInfo ) const
18494 {
18495 return static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
18496 }
18497#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18498 VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo ) const
18499 {
18500 Result result = static_cast<Result>( vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
18501 return createResultValue( result, "vk::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
18502 }
18503#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
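  // Illustrative sketch (editor's comment): submitting and presenting with the enhanced
  // overloads above, assuming exceptions are enabled so ResultValueType<void>::type is void.
  // presentKHR still returns a Result because eSuboptimalKHR is a non-error code the caller may
  // want to inspect.
  //
  //   queue.submit( submitInfo, fence );
  //   vk::PresentInfoKHR presentInfo( 1, &renderFinishedSemaphore, 1, &swapchain, &imageIndex );
  //   vk::Result presentResult = queue.presentKHR( presentInfo );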
18504#ifndef VULKAN_HPP_NO_SMART_HANDLE
18505 class BufferDeleter;
18506 using UniqueBuffer = UniqueHandle<Buffer, BufferDeleter>;
18507 class BufferViewDeleter;
18508 using UniqueBufferView = UniqueHandle<BufferView, BufferViewDeleter>;
18509 class CommandBufferDeleter;
18510 using UniqueCommandBuffer = UniqueHandle<CommandBuffer, CommandBufferDeleter>;
18511 class CommandPoolDeleter;
18512 using UniqueCommandPool = UniqueHandle<CommandPool, CommandPoolDeleter>;
18513 class DescriptorPoolDeleter;
18514 using UniqueDescriptorPool = UniqueHandle<DescriptorPool, DescriptorPoolDeleter>;
18515 class DescriptorSetDeleter;
18516 using UniqueDescriptorSet = UniqueHandle<DescriptorSet, DescriptorSetDeleter>;
18517 class DescriptorSetLayoutDeleter;
18518 using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, DescriptorSetLayoutDeleter>;
18519 class DeviceMemoryDeleter;
18520 using UniqueDeviceMemory = UniqueHandle<DeviceMemory, DeviceMemoryDeleter>;
18521 class EventDeleter;
18522 using UniqueEvent = UniqueHandle<Event, EventDeleter>;
18523 class FenceDeleter;
18524 using UniqueFence = UniqueHandle<Fence, FenceDeleter>;
18525 class FramebufferDeleter;
18526 using UniqueFramebuffer = UniqueHandle<Framebuffer, FramebufferDeleter>;
18527 class ImageDeleter;
18528 using UniqueImage = UniqueHandle<Image, ImageDeleter>;
18529 class ImageViewDeleter;
18530 using UniqueImageView = UniqueHandle<ImageView, ImageViewDeleter>;
18531 class IndirectCommandsLayoutNVXDeleter;
18532 using UniqueIndirectCommandsLayoutNVX = UniqueHandle<IndirectCommandsLayoutNVX, IndirectCommandsLayoutNVXDeleter>;
18533 class ObjectTableNVXDeleter;
18534 using UniqueObjectTableNVX = UniqueHandle<ObjectTableNVX, ObjectTableNVXDeleter>;
18535 class PipelineDeleter;
18536 using UniquePipeline = UniqueHandle<Pipeline, PipelineDeleter>;
18537 class PipelineCacheDeleter;
18538 using UniquePipelineCache = UniqueHandle<PipelineCache, PipelineCacheDeleter>;
18539 class PipelineLayoutDeleter;
18540 using UniquePipelineLayout = UniqueHandle<PipelineLayout, PipelineLayoutDeleter>;
18541 class QueryPoolDeleter;
18542 using UniqueQueryPool = UniqueHandle<QueryPool, QueryPoolDeleter>;
18543 class RenderPassDeleter;
18544 using UniqueRenderPass = UniqueHandle<RenderPass, RenderPassDeleter>;
18545 class SamplerDeleter;
18546 using UniqueSampler = UniqueHandle<Sampler, SamplerDeleter>;
18547 class SemaphoreDeleter;
18548 using UniqueSemaphore = UniqueHandle<Semaphore, SemaphoreDeleter>;
18549 class ShaderModuleDeleter;
18550 using UniqueShaderModule = UniqueHandle<ShaderModule, ShaderModuleDeleter>;
18551 class SwapchainKHRDeleter;
18552 using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, SwapchainKHRDeleter>;
18553#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
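  // Editor's note: each Unique* alias couples a handle with the deleter that knows how to
  // destroy or free it, so the object is released automatically when the UniqueHandle goes out
  // of scope. Illustrative sketch, assuming 'bufferCreateInfo' exists:
  //
  //   vk::UniqueBuffer stagingBuffer = device.createBufferUnique( bufferCreateInfo );
  //   // the buffer is destroyed when stagingBuffer leaves scope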
18554
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018555 class Device
18556 {
18557 public:
18558 Device()
18559 : m_device(VK_NULL_HANDLE)
18560 {}
18561
Mark Lobodzinskicd306ab2017-02-14 16:09:03 -070018562 Device( std::nullptr_t )
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018563 : m_device(VK_NULL_HANDLE)
18564 {}
18565
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018566#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
18567 Device(VkDevice device)
18568 : m_device(device)
18569 {}
18570
18571 Device& operator=(VkDevice device)
18572 {
18573 m_device = device;
18574 return *this;
18575 }
18576#endif
18577
Mark Lobodzinskicd306ab2017-02-14 16:09:03 -070018578 Device& operator=( std::nullptr_t )
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018579 {
18580 m_device = VK_NULL_HANDLE;
18581 return *this;
18582 }
18583
Lenny Komowebf33162016-08-26 14:10:08 -060018584 bool operator==(Device const &rhs) const
18585 {
18586 return m_device == rhs.m_device;
18587 }
18588
18589 bool operator!=(Device const &rhs) const
18590 {
18591 return m_device != rhs.m_device;
18592 }
18593
18594 bool operator<(Device const &rhs) const
18595 {
18596 return m_device < rhs.m_device;
18597 }
18598
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018599 PFN_vkVoidFunction getProcAddr( const char* pName ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018600#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018601 PFN_vkVoidFunction getProcAddr( const std::string & name ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018602#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18603
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018604 void destroy( const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018605#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018606 void destroy( Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018607#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18608
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018609 void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018610#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018611 Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018612#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18613
18614#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018615 Result waitIdle() const;
18616#else
18617 ResultValueType<void>::type waitIdle() const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018618#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18619
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018620 Result allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018621#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018622 ResultValueType<DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18623#ifndef VULKAN_HPP_NO_SMART_HANDLE
18624 UniqueDeviceMemory allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18625#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018626#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18627
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018628 void freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018629#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018630 void freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18631#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18632
18633 Result mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData ) const;
18634#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18635 ResultValueType<void*>::type mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags = MemoryMapFlags() ) const;
18636#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18637
18638 void unmapMemory( DeviceMemory memory ) const;
18639
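  // Illustrative sketch (editor's comment): allocating and mapping host-visible memory with the
  // enhanced overloads above (exceptions enabled, so the ResultValueType unwraps to the value).
  // 'memoryTypeIndex' must select a host-visible type, and 'vertexDataSize' is assumed not to
  // exceed the mapped size.
  //
  //   vk::MemoryAllocateInfo allocInfo( requirements.size, memoryTypeIndex );
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
  //   void* data = device.mapMemory( memory, 0, requirements.size );
  //   std::memcpy( data, vertices.data(), vertexDataSize );
  //   device.unmapMemory( memory );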
18640 Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const;
18641#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18642 ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const;
18643#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18644
18645 Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const;
18646#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18647 ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const;
18648#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18649
18650 void getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes ) const;
18651#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18652 DeviceSize getMemoryCommitment( DeviceMemory memory ) const;
18653#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18654
18655 void getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements ) const;
18656#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18657 MemoryRequirements getBufferMemoryRequirements( Buffer buffer ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018658#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18659
18660#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018661 Result bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const;
18662#else
18663 ResultValueType<void>::type bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const;
18664#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
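  // Illustrative sketch (editor's comment): querying requirements for a buffer created with
  // createBuffer (declared further below) and binding memory at offset 0; 'memory' is assumed
  // to come from a type allowed by requirements.memoryTypeBits.
  //
  //   vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );
  //   device.bindBufferMemory( buffer, memory, 0 );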
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018665
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018666 void getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018667#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018668 MemoryRequirements getImageMemoryRequirements( Image image ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018669#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18670
18671#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018672 Result bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const;
18673#else
18674 ResultValueType<void>::type bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018675#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18676
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018677 void getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018678#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018679 template <typename Allocator = std::allocator<SparseImageMemoryRequirements>>
18680 std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( Image image ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018681#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18682
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018683 Result createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018684#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018685 ResultValueType<Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18686#ifndef VULKAN_HPP_NO_SMART_HANDLE
18687 UniqueFence createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18688#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018689#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18690
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018691 void destroyFence( Fence fence, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018692#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018693 void destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018694#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18695
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018696 Result resetFences( uint32_t fenceCount, const Fence* pFences ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018697#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018698 ResultValueType<void>::type resetFences( ArrayProxy<const Fence> fences ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018699#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18700
18701#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018702 Result getFenceStatus( Fence fence ) const;
18703#else
18704 Result getFenceStatus( Fence fence ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018705#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
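  // Editor's note: getFenceStatus returns a plain Result in both modes because vkGetFenceStatus
  // has two non-error codes, eSuccess (signaled) and eNotReady (unsignaled), which the caller
  // must distinguish; the same applies to getEventStatus below.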
18706
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018707 Result waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018708#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018709 Result waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout ) const;
18710#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
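  // Illustrative sketch (editor's comment): creating a fence, handing it to a queue submission,
  // and blocking until the work completes (exceptions enabled for the create call).
  //
  //   vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
  //   queue.submit( submitInfo, fence );
  //   device.waitForFences( fence, VK_TRUE, UINT64_MAX );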
18711
18712 Result createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore ) const;
18713#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18714 ResultValueType<Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18715#ifndef VULKAN_HPP_NO_SMART_HANDLE
18716 UniqueSemaphore createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18717#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
18718#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18719
18720 void destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator ) const;
18721#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18722 void destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18723#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18724
18725 Result createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent ) const;
18726#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18727 ResultValueType<Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18728#ifndef VULKAN_HPP_NO_SMART_HANDLE
18729 UniqueEvent createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18730#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
18731#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18732
18733 void destroyEvent( Event event, const AllocationCallbacks* pAllocator ) const;
18734#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18735 void destroyEvent( Event event, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018736#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18737
18738#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018739 Result getEventStatus( Event event ) const;
18740#else
18741 Result getEventStatus( Event event ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018742#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18743
18744#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018745 Result setEvent( Event event ) const;
18746#else
18747 ResultValueType<void>::type setEvent( Event event ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018748#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18749
18750#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018751 Result resetEvent( Event event ) const;
18752#else
18753 ResultValueType<void>::type resetEvent( Event event ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018754#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18755
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018756 Result createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018757#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018758 ResultValueType<QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18759#ifndef VULKAN_HPP_NO_SMART_HANDLE
18760 UniqueQueryPool createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18761#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018762#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18763
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018764 void destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018765#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018766 void destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018767#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18768
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018769 Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018770#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
18771 template <typename T>
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018772 Result getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018773#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
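  // Illustrative sketch (editor's comment): reading two 64-bit query results through the
  // ArrayProxy overload above; the byte size is derived from the proxy's element count.
  //
  //   std::array<uint64_t, 2> timestamps;
  //   device.getQueryPoolResults<uint64_t>( queryPool, 0, 2, timestamps,
  //                                         sizeof( uint64_t ), vk::QueryResultFlagBits::e64 );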
18774
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018775 Result createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018776#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018777 ResultValueType<Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18778#ifndef VULKAN_HPP_NO_SMART_HANDLE
18779 UniqueBuffer createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18780#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018781#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18782
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018783 void destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018784#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018785 void destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018786#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18787
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018788 Result createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018789#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018790 ResultValueType<BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18791#ifndef VULKAN_HPP_NO_SMART_HANDLE
18792 UniqueBufferView createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18793#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018794#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18795
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018796 void destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018797#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018798 void destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018799#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18800
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018801 Result createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018802#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018803 ResultValueType<Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18804#ifndef VULKAN_HPP_NO_SMART_HANDLE
18805 UniqueImage createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18806#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018807#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18808
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018809 void destroyImage( Image image, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018810#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018811 void destroyImage( Image image, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018812#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18813
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018814 void getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018815#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018816 SubresourceLayout getImageSubresourceLayout( Image image, const ImageSubresource & subresource ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018817#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18818
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018819 Result createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018820#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018821 ResultValueType<ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18822#ifndef VULKAN_HPP_NO_SMART_HANDLE
18823 UniqueImageView createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18824#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018825#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18826
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018827 void destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018828#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018829 void destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018830#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18831
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018832 Result createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018833#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018834 ResultValueType<ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18835#ifndef VULKAN_HPP_NO_SMART_HANDLE
18836 UniqueShaderModule createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18837#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018838#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18839
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018840 void destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018841#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018842 void destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018843#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18844
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018845 Result createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018846#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018847 ResultValueType<PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18848#ifndef VULKAN_HPP_NO_SMART_HANDLE
18849 UniquePipelineCache createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18850#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018851#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18852
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018853 void destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018854#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018855 void destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018856#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18857
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018858 Result getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018859#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018860 template <typename Allocator = std::allocator<uint8_t>>
18861 typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( PipelineCache pipelineCache ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018862#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18863
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018864 Result mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018865#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018866 ResultValueType<void>::type mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018867#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18868
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018869 Result createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018870#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018871 template <typename Allocator = std::allocator<Pipeline>>
18872 typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18873 ResultValueType<Pipeline>::type createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18874#ifndef VULKAN_HPP_NO_SMART_HANDLE
18875 template <typename Allocator = std::allocator<Pipeline>>
18876 std::vector<UniquePipeline> createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18877 UniquePipeline createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18878#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018879#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
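  // Editor's note: the singular createGraphicsPipeline above is a convenience over
  // createGraphicsPipelines for the common one-pipeline case. Illustrative sketch, assuming
  // 'pipelineCreateInfo' exists:
  //
  //   vk::Pipeline pipeline = device.createGraphicsPipeline( pipelineCache, pipelineCreateInfo );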
18880
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018881 Result createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018882#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018883 template <typename Allocator = std::allocator<Pipeline>>
18884 typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18885 ResultValueType<Pipeline>::type createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18886#ifndef VULKAN_HPP_NO_SMART_HANDLE
18887 template <typename Allocator = std::allocator<Pipeline>>
18888 std::vector<UniquePipeline> createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18889 UniquePipeline createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18890#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018891#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18892
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018893 void destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018894#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018895 void destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018896#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18897
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018898 Result createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018899#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018900 ResultValueType<PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18901#ifndef VULKAN_HPP_NO_SMART_HANDLE
18902 UniquePipelineLayout createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18903#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018904#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18905
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018906 void destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018907#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018908 void destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018909#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18910
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018911 Result createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018912#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018913 ResultValueType<Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18914#ifndef VULKAN_HPP_NO_SMART_HANDLE
18915 UniqueSampler createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18916#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018917#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18918
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018919 void destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018920#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018921 void destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018922#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18923
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018924 Result createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018925#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018926 ResultValueType<DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18927#ifndef VULKAN_HPP_NO_SMART_HANDLE
18928 UniqueDescriptorSetLayout createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18929#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018930#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18931
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018932 void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018933#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018934 void destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018935#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18936
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018937 Result createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018938#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018939 ResultValueType<DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18940#ifndef VULKAN_HPP_NO_SMART_HANDLE
18941 UniqueDescriptorPool createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18942#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018943#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18944
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018945 void destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018946#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018947 void destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018948#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18949
18950#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018951 Result resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags ) const;
18952#else
18953 ResultValueType<void>::type resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags = DescriptorPoolResetFlags() ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018954#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18955
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018956 Result allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018957#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018958 template <typename Allocator = std::allocator<DescriptorSet>>
18959 typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo ) const;
18960#ifndef VULKAN_HPP_NO_SMART_HANDLE
18961 template <typename Allocator = std::allocator<DescriptorSet>>
18962 std::vector<UniqueDescriptorSet> allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo ) const;
18963#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018964#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
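  // Illustrative sketch (editor's comment): allocating one descriptor set from a pool; with
  // exceptions enabled the enhanced overload returns a std::vector sized by the allocate info.
  //
  //   vk::DescriptorSetAllocateInfo allocInfo( descriptorPool, 1, &descriptorSetLayout );
  //   vk::DescriptorSet descriptorSet = device.allocateDescriptorSets( allocInfo ).front();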
18965
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018966 Result freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018967#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018968 ResultValueType<void>::type freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018969#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18970
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018971 void updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018972#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018973 void updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018974#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
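  // Illustrative sketch (editor's comment): writing a uniform-buffer binding into a descriptor
  // set; a single WriteDescriptorSet converts to the ArrayProxy parameter and nullptr passes an
  // empty copy list. 'Transforms' is a placeholder struct for the buffer contents.
  //
  //   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, sizeof( Transforms ) );
  //   vk::WriteDescriptorSet write;
  //   write.setDstSet( descriptorSet )
  //        .setDstBinding( 0 )
  //        .setDescriptorCount( 1 )
  //        .setDescriptorType( vk::DescriptorType::eUniformBuffer )
  //        .setPBufferInfo( &bufferInfo );
  //   device.updateDescriptorSets( write, nullptr );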
18975
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018976 Result createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018977#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018978 ResultValueType<Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18979#ifndef VULKAN_HPP_NO_SMART_HANDLE
18980 UniqueFramebuffer createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18981#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018982#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18983
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018984 void destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018985#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018986 void destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018987#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18988
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018989 Result createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018990#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018991 ResultValueType<RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18992#ifndef VULKAN_HPP_NO_SMART_HANDLE
18993 UniqueRenderPass createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
18994#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018995#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
18996
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018997 void destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060018998#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070018999 void destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019000#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19001
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019002 void getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019003#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019004 Extent2D getRenderAreaGranularity( RenderPass renderPass ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019005#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19006
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019007 Result createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019008#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019009 ResultValueType<CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19010#ifndef VULKAN_HPP_NO_SMART_HANDLE
19011 UniqueCommandPool createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19012#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019013#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19014
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019015 void destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019016#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019017 void destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019018#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19019
19020#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019021 Result resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const;
19022#else
19023 ResultValueType<void>::type resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019024#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19025
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019026 Result allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019027#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019028 template <typename Allocator = std::allocator<CommandBuffer>>
19029 typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo ) const;
19030#ifndef VULKAN_HPP_NO_SMART_HANDLE
19031 template <typename Allocator = std::allocator<CommandBuffer>>
19032 std::vector<UniqueCommandBuffer> allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo ) const;
19033#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019034#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19035
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019036 void freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019037#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019038 void freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019039#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19040
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019041 Result createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019042#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019043 template <typename Allocator = std::allocator<SwapchainKHR>>
19044 typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19045 ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19046#ifndef VULKAN_HPP_NO_SMART_HANDLE
19047 template <typename Allocator = std::allocator<SwapchainKHR>>
19048 std::vector<UniqueSwapchainKHR> createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19049 UniqueSwapchainKHR createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19050#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019051#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19052
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019053 Result createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019054#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019055 ResultValueType<SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19056#ifndef VULKAN_HPP_NO_SMART_HANDLE
19057 UniqueSwapchainKHR createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19058#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019059#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19060
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019061 void destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019062#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019063 void destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019064#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19065
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019066 Result getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019067#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019068 template <typename Allocator = std::allocator<Image>>
19069 typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( SwapchainKHR swapchain ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019070#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19071
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019072 Result acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019073#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019074 ResultValue<uint32_t> acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019075#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19076
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019077 Result debugMarkerSetObjectNameEXT( DebugMarkerObjectNameInfoEXT* pNameInfo ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019078#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019079 ResultValueType<DebugMarkerObjectNameInfoEXT>::type debugMarkerSetObjectNameEXT() const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019080#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19081
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019082 Result debugMarkerSetObjectTagEXT( DebugMarkerObjectTagInfoEXT* pTagInfo ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019083#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019084 ResultValueType<DebugMarkerObjectTagInfoEXT>::type debugMarkerSetObjectTagEXT() const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019085#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19086
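    // Note (editorial): for this header version the enhanced debugMarkerSetObjectNameEXT /
    // debugMarkerSetObjectTagEXT overloads above take no input structure -- the generator
    // appears to treat the non-const pointer parameter as an output -- so applications that
    // want to attach a name or tag generally use the pointer overloads instead.
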
Lenny Komow6501c122016-08-31 15:03:49 -060019087#ifdef VK_USE_PLATFORM_WIN32_KHR
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019088 Result getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const;
19089#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19090 ResultValueType<HANDLE>::type getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType ) const;
19091#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Lenny Komow6501c122016-08-31 15:03:49 -060019092#endif /*VK_USE_PLATFORM_WIN32_KHR*/
19093
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019094 Result createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const;
Lenny Komow6501c122016-08-31 15:03:49 -060019095#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019096 ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19097#ifndef VULKAN_HPP_NO_SMART_HANDLE
19098 UniqueIndirectCommandsLayoutNVX createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19099#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komow6501c122016-08-31 15:03:49 -060019100#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19101
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019102 void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019103#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019104 void destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019105#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19106
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019107 Result createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019108#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019109 ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19110#ifndef VULKAN_HPP_NO_SMART_HANDLE
19111 UniqueObjectTableNVX createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
19112#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019113#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19114
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019115 void destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019116#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019117 void destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019118#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19119
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019120 Result registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019121#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019122 ResultValueType<void>::type registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019123#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19124
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019125 Result unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019126#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019127 ResultValueType<void>::type unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019128#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19129
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019130 void trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlagsKHR flags ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019131
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019132 Result displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019133#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019134 ResultValueType<void>::type displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo ) const;
Mark Lobodzinski2d589822016-12-12 09:44:34 -070019135#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19136
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019137 Result registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const;
Mark Young39389872017-01-19 21:10:49 -070019138#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019139 ResultValueType<Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, const AllocationCallbacks & allocator ) const;
Mark Young39389872017-01-19 21:10:49 -070019140#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19141
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019142 Result registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const;
Mark Young39389872017-01-19 21:10:49 -070019143#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019144 ResultValueType<Fence>::type registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, const AllocationCallbacks & allocator ) const;
Mark Young39389872017-01-19 21:10:49 -070019145#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19146
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019147 Result getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const;
Mark Young39389872017-01-19 21:10:49 -070019148#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019149 ResultValue<uint64_t> getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter ) const;
Mark Young39389872017-01-19 21:10:49 -070019150#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19151
Lenny Komowbed9b5c2016-08-11 11:23:15 -060019152#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
19153 explicit
19154#endif
19155 operator VkDevice() const
19156 {
19157 return m_device;
19158 }
19159
19160 explicit operator bool() const
19161 {
19162 return m_device != VK_NULL_HANDLE;
19163 }
19164
19165 bool operator!() const
19166 {
19167 return m_device == VK_NULL_HANDLE;
19168 }
19169
19170 private:
19171 VkDevice m_device;
19172 };
19173 static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
19174
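  // Editorial usage sketch (not part of the generated header): demonstrates the handle-wrapper
  // semantics declared above -- operator! / the bool conversion for validity checks and the
  // conversion to the underlying VkDevice for calls into the C API from vulkan.h. Guarded by
  // the hypothetical, normally undefined macro VULKAN_HPP_ENABLE_USAGE_SKETCHES so it never
  // affects real builds; the example* function name is editorial.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES )
  inline void exampleDeviceHandleChecks( vk::Device device )
  {
    if ( !device )                                          // true when the wrapped handle is VK_NULL_HANDLE
    {
      return;
    }
    VkDevice cHandle = static_cast<VkDevice>( device );     // always valid, even when the conversion is explicit
    vkDeviceWaitIdle( cHandle );                            // plain C entry point from vulkan.h
  }
#endif
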
Mark Lobodzinski36c33862017-02-13 10:15:53 -070019175#ifndef VULKAN_HPP_NO_SMART_HANDLE
19176 class BufferDeleter
19177 {
19178 public:
19179 BufferDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19180 : m_device( device )
19181 , m_allocator( allocator )
19182 {}
19183
19184 void operator()( Buffer buffer )
19185 {
19186 m_device.destroyBuffer( buffer, m_allocator );
19187 }
19188
19189 private:
19190 Device m_device;
19191 Optional<const AllocationCallbacks> m_allocator;
19192 };
19193
19194 class BufferViewDeleter
19195 {
19196 public:
19197 BufferViewDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19198 : m_device( device )
19199 , m_allocator( allocator )
19200 {}
19201
19202 void operator()( BufferView bufferView )
19203 {
19204 m_device.destroyBufferView( bufferView, m_allocator );
19205 }
19206
19207 private:
19208 Device m_device;
19209 Optional<const AllocationCallbacks> m_allocator;
19210 };
19211
19212 class CommandBufferDeleter
19213 {
19214 public:
19215 CommandBufferDeleter( Device device = Device(), CommandPool commandPool = CommandPool() )
19216 : m_device( device )
19217 , m_commandPool( commandPool )
19218 {}
19219
19220 void operator()( CommandBuffer commandBuffer )
19221 {
19222 m_device.freeCommandBuffers( m_commandPool, commandBuffer );
19223 }
19224
19225 private:
19226 Device m_device;
19227 CommandPool m_commandPool;
19228 };
19229
19230 class CommandPoolDeleter
19231 {
19232 public:
19233 CommandPoolDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19234 : m_device( device )
19235 , m_allocator( allocator )
19236 {}
19237
19238 void operator()( CommandPool commandPool )
19239 {
19240 m_device.destroyCommandPool( commandPool, m_allocator );
19241 }
19242
19243 private:
19244 Device m_device;
19245 Optional<const AllocationCallbacks> m_allocator;
19246 };
19247
19248 class DescriptorPoolDeleter
19249 {
19250 public:
19251 DescriptorPoolDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19252 : m_device( device )
19253 , m_allocator( allocator )
19254 {}
19255
19256 void operator()( DescriptorPool descriptorPool )
19257 {
19258 m_device.destroyDescriptorPool( descriptorPool, m_allocator );
19259 }
19260
19261 private:
19262 Device m_device;
19263 Optional<const AllocationCallbacks> m_allocator;
19264 };
19265
19266 class DescriptorSetDeleter
19267 {
19268 public:
19269 DescriptorSetDeleter( Device device = Device(), DescriptorPool descriptorPool = DescriptorPool() )
19270 : m_device( device )
19271 , m_descriptorPool( descriptorPool )
19272 {}
19273
19274 void operator()( DescriptorSet descriptorSet )
19275 {
19276 m_device.freeDescriptorSets( m_descriptorPool, descriptorSet );
19277 }
19278
19279 private:
19280 Device m_device;
19281 DescriptorPool m_descriptorPool;
19282 };
19283
19284 class DescriptorSetLayoutDeleter
19285 {
19286 public:
19287 DescriptorSetLayoutDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19288 : m_device( device )
19289 , m_allocator( allocator )
19290 {}
19291
19292 void operator()( DescriptorSetLayout descriptorSetLayout )
19293 {
19294 m_device.destroyDescriptorSetLayout( descriptorSetLayout, m_allocator );
19295 }
19296
19297 private:
19298 Device m_device;
19299 Optional<const AllocationCallbacks> m_allocator;
19300 };
19301
19302 class DeviceMemoryDeleter
19303 {
19304 public:
19305 DeviceMemoryDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19306 : m_device( device )
19307 , m_allocator( allocator )
19308 {}
19309
19310 void operator()( DeviceMemory deviceMemory )
19311 {
19312 m_device.freeMemory( deviceMemory, m_allocator );
19313 }
19314
19315 private:
19316 Device m_device;
19317 Optional<const AllocationCallbacks> m_allocator;
19318 };
19319
19320 class EventDeleter
19321 {
19322 public:
19323 EventDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19324 : m_device( device )
19325 , m_allocator( allocator )
19326 {}
19327
19328 void operator()( Event event )
19329 {
19330 m_device.destroyEvent( event, m_allocator );
19331 }
19332
19333 private:
19334 Device m_device;
19335 Optional<const AllocationCallbacks> m_allocator;
19336 };
19337
19338 class FenceDeleter
19339 {
19340 public:
19341 FenceDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19342 : m_device( device )
19343 , m_allocator( allocator )
19344 {}
19345
19346 void operator()( Fence fence )
19347 {
19348 m_device.destroyFence( fence, m_allocator );
19349 }
19350
19351 private:
19352 Device m_device;
19353 Optional<const AllocationCallbacks> m_allocator;
19354 };
19355
19356 class FramebufferDeleter
19357 {
19358 public:
19359 FramebufferDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19360 : m_device( device )
19361 , m_allocator( allocator )
19362 {}
19363
19364 void operator()( Framebuffer framebuffer )
19365 {
19366 m_device.destroyFramebuffer( framebuffer, m_allocator );
19367 }
19368
19369 private:
19370 Device m_device;
19371 Optional<const AllocationCallbacks> m_allocator;
19372 };
19373
19374 class ImageDeleter
19375 {
19376 public:
19377 ImageDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19378 : m_device( device )
19379 , m_allocator( allocator )
19380 {}
19381
19382 void operator()( Image image )
19383 {
19384 m_device.destroyImage( image, m_allocator );
19385 }
19386
19387 private:
19388 Device m_device;
19389 Optional<const AllocationCallbacks> m_allocator;
19390 };
19391
19392 class ImageViewDeleter
19393 {
19394 public:
19395 ImageViewDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19396 : m_device( device )
19397 , m_allocator( allocator )
19398 {}
19399
19400 void operator()( ImageView imageView )
19401 {
19402 m_device.destroyImageView( imageView, m_allocator );
19403 }
19404
19405 private:
19406 Device m_device;
19407 Optional<const AllocationCallbacks> m_allocator;
19408 };
19409
19410 class IndirectCommandsLayoutNVXDeleter
19411 {
19412 public:
19413 IndirectCommandsLayoutNVXDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19414 : m_device( device )
19415 , m_allocator( allocator )
19416 {}
19417
19418 void operator()( IndirectCommandsLayoutNVX indirectCommandsLayoutNVX )
19419 {
19420 m_device.destroyIndirectCommandsLayoutNVX( indirectCommandsLayoutNVX, m_allocator );
19421 }
19422
19423 private:
19424 Device m_device;
19425 Optional<const AllocationCallbacks> m_allocator;
19426 };
19427
19428 class ObjectTableNVXDeleter
19429 {
19430 public:
19431 ObjectTableNVXDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19432 : m_device( device )
19433 , m_allocator( allocator )
19434 {}
19435
19436 void operator()( ObjectTableNVX objectTableNVX )
19437 {
19438 m_device.destroyObjectTableNVX( objectTableNVX, m_allocator );
19439 }
19440
19441 private:
19442 Device m_device;
19443 Optional<const AllocationCallbacks> m_allocator;
19444 };
19445
19446 class PipelineDeleter
19447 {
19448 public:
19449 PipelineDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19450 : m_device( device )
19451 , m_allocator( allocator )
19452 {}
19453
19454 void operator()( Pipeline pipeline )
19455 {
19456 m_device.destroyPipeline( pipeline, m_allocator );
19457 }
19458
19459 private:
19460 Device m_device;
19461 Optional<const AllocationCallbacks> m_allocator;
19462 };
19463
19464 class PipelineCacheDeleter
19465 {
19466 public:
19467 PipelineCacheDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19468 : m_device( device )
19469 , m_allocator( allocator )
19470 {}
19471
19472 void operator()( PipelineCache pipelineCache )
19473 {
19474 m_device.destroyPipelineCache( pipelineCache, m_allocator );
19475 }
19476
19477 private:
19478 Device m_device;
19479 Optional<const AllocationCallbacks> m_allocator;
19480 };
19481
19482 class PipelineLayoutDeleter
19483 {
19484 public:
19485 PipelineLayoutDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19486 : m_device( device )
19487 , m_allocator( allocator )
19488 {}
19489
19490 void operator()( PipelineLayout pipelineLayout )
19491 {
19492 m_device.destroyPipelineLayout( pipelineLayout, m_allocator );
19493 }
19494
19495 private:
19496 Device m_device;
19497 Optional<const AllocationCallbacks> m_allocator;
19498 };
19499
19500 class QueryPoolDeleter
19501 {
19502 public:
19503 QueryPoolDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19504 : m_device( device )
19505 , m_allocator( allocator )
19506 {}
19507
19508 void operator()( QueryPool queryPool )
19509 {
19510 m_device.destroyQueryPool( queryPool, m_allocator );
19511 }
19512
19513 private:
19514 Device m_device;
19515 Optional<const AllocationCallbacks> m_allocator;
19516 };
19517
19518 class RenderPassDeleter
19519 {
19520 public:
19521 RenderPassDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19522 : m_device( device )
19523 , m_allocator( allocator )
19524 {}
19525
19526 void operator()( RenderPass renderPass )
19527 {
19528 m_device.destroyRenderPass( renderPass, m_allocator );
19529 }
19530
19531 private:
19532 Device m_device;
19533 Optional<const AllocationCallbacks> m_allocator;
19534 };
19535
19536 class SamplerDeleter
19537 {
19538 public:
19539 SamplerDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19540 : m_device( device )
19541 , m_allocator( allocator )
19542 {}
19543
19544 void operator()( Sampler sampler )
19545 {
19546 m_device.destroySampler( sampler, m_allocator );
19547 }
19548
19549 private:
19550 Device m_device;
19551 Optional<const AllocationCallbacks> m_allocator;
19552 };
19553
19554 class SemaphoreDeleter
19555 {
19556 public:
19557 SemaphoreDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19558 : m_device( device )
19559 , m_allocator( allocator )
19560 {}
19561
19562 void operator()( Semaphore semaphore )
19563 {
19564 m_device.destroySemaphore( semaphore, m_allocator );
19565 }
19566
19567 private:
19568 Device m_device;
19569 Optional<const AllocationCallbacks> m_allocator;
19570 };
19571
19572 class ShaderModuleDeleter
19573 {
19574 public:
19575 ShaderModuleDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19576 : m_device( device )
19577 , m_allocator( allocator )
19578 {}
19579
19580 void operator()( ShaderModule shaderModule )
19581 {
19582 m_device.destroyShaderModule( shaderModule, m_allocator );
19583 }
19584
19585 private:
19586 Device m_device;
19587 Optional<const AllocationCallbacks> m_allocator;
19588 };
19589
19590 class SwapchainKHRDeleter
19591 {
19592 public:
19593 SwapchainKHRDeleter( Device device = Device(), Optional<const AllocationCallbacks> allocator = nullptr )
19594 : m_device( device )
19595 , m_allocator( allocator )
19596 {}
19597
19598 void operator()( SwapchainKHR swapchainKHR )
19599 {
19600 m_device.destroySwapchainKHR( swapchainKHR, m_allocator );
19601 }
19602
19603 private:
19604 Device m_device;
19605 Optional<const AllocationCallbacks> m_allocator;
19606 };
19607#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
19608
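  // Editorial sketch (not generated code): the deleter classes above are what the Unique* smart
  // handles invoke on destruction. createBufferUnique pairs the created Buffer with a
  // BufferDeleter that stores this device and the allocator, so the buffer is destroyed when the
  // UniqueBuffer leaves scope. Guarded by the hypothetical macro VULKAN_HPP_ENABLE_USAGE_SKETCHES;
  // the size/usage values and the example* name are editorial placeholders, and the code assumes
  // the default configuration in which exceptions are enabled.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_SMART_HANDLE )
  inline void exampleUniqueBufferLifetime( vk::Device device )
  {
    vk::BufferCreateInfo createInfo;
    createInfo.size  = 256;                                    // placeholder size in bytes
    createInfo.usage = vk::BufferUsageFlagBits::eTransferSrc;  // placeholder usage
    vk::UniqueBuffer buffer = device.createBufferUnique( createInfo );
    // ... use *buffer ...
  }                                                            // BufferDeleter::operator() destroys the buffer here
#endif
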
19609 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName ) const
19610 {
19611 return vkGetDeviceProcAddr( m_device, pName );
19612 }
19613#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19614 VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name ) const
19615 {
19616 return vkGetDeviceProcAddr( m_device, name.c_str() );
19617 }
19618#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19619
19620 VULKAN_HPP_INLINE void Device::destroy( const AllocationCallbacks* pAllocator ) const
19621 {
19622 vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19623 }
19624#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19625 VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator ) const
19626 {
19627 vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19628 }
19629#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19630
19631 VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Queue* pQueue ) const
19632 {
19633 vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( pQueue ) );
19634 }
19635#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19636 VULKAN_HPP_INLINE Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const
19637 {
19638 Queue queue;
19639 vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue*>( &queue ) );
19640 return queue;
19641 }
19642#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19643
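  // Editorial sketch (not generated code): fetching a queue with the enhanced overload defined
  // above. The family index 0 / queue index 0 are placeholder assumptions; the example* name and
  // the VULKAN_HPP_ENABLE_USAGE_SKETCHES guard macro are editorial.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline vk::Queue exampleGetFirstQueue( vk::Device device )
  {
    return device.getQueue( 0, 0 );   // queue family 0, queue index 0 -- placeholders
  }
#endif
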
19644#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19645 VULKAN_HPP_INLINE Result Device::waitIdle() const
19646 {
19647 return static_cast<Result>( vkDeviceWaitIdle( m_device ) );
19648 }
19649#else
19650 VULKAN_HPP_INLINE ResultValueType<void>::type Device::waitIdle() const
19651 {
19652 Result result = static_cast<Result>( vkDeviceWaitIdle( m_device ) );
19653 return createResultValue( result, "vk::Device::waitIdle" );
19654 }
19655#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19656
19657 VULKAN_HPP_INLINE Result Device::allocateMemory( const MemoryAllocateInfo* pAllocateInfo, const AllocationCallbacks* pAllocator, DeviceMemory* pMemory ) const
19658 {
19659 return static_cast<Result>( vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
19660 }
19661#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19662 VULKAN_HPP_INLINE ResultValueType<DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator ) const
19663 {
19664 DeviceMemory memory;
19665 Result result = static_cast<Result>( vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
19666 return createResultValue( result, memory, "vk::Device::allocateMemory" );
19667 }
19668#ifndef VULKAN_HPP_NO_SMART_HANDLE
19669 VULKAN_HPP_INLINE UniqueDeviceMemory Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator ) const
19670 {
19671 DeviceMemoryDeleter deleter( *this, allocator );
19672 return UniqueDeviceMemory( allocateMemory( allocateInfo, allocator ), deleter );
19673 }
19674#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
19675#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19676
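  // Editorial sketch (not generated code): allocating, mapping, clearing and freeing device
  // memory with the enhanced overloads. Assumes the default configuration in which exceptions
  // are enabled, so ResultValueType<T>::type is plain T and allocateMemory returns the
  // DeviceMemory directly. The 1 KiB size is a placeholder and the memory type index must refer
  // to host-visible memory; the example* name and guard macro are editorial.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline void exampleAllocateAndMapMemory( vk::Device device, uint32_t hostVisibleMemoryTypeIndex )
  {
    vk::MemoryAllocateInfo allocateInfo;
    allocateInfo.allocationSize  = 1024;                       // placeholder size in bytes
    allocateInfo.memoryTypeIndex = hostVisibleMemoryTypeIndex; // must be a host-visible memory type
    vk::DeviceMemory memory = device.allocateMemory( allocateInfo );
    void* data = device.mapMemory( memory, 0, 1024, vk::MemoryMapFlags() );
    std::memset( data, 0, 1024 );                              // <cstring> is already included by this header
    device.unmapMemory( memory );
    device.freeMemory( memory );
  }
#endif
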
19677 VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, const AllocationCallbacks* pAllocator ) const
19678 {
19679 vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19680 }
19681#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19682 VULKAN_HPP_INLINE void Device::freeMemory( DeviceMemory memory, Optional<const AllocationCallbacks> allocator ) const
19683 {
19684 vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19685 }
19686#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19687
19688 VULKAN_HPP_INLINE Result Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags, void** ppData ) const
19689 {
19690 return static_cast<Result>( vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), ppData ) );
19691 }
19692#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19693 VULKAN_HPP_INLINE ResultValueType<void*>::type Device::mapMemory( DeviceMemory memory, DeviceSize offset, DeviceSize size, MemoryMapFlags flags ) const
19694 {
19695 void* pData;
19696 Result result = static_cast<Result>( vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), offset, size, static_cast<VkMemoryMapFlags>( flags ), &pData ) );
19697 return createResultValue( result, pData, "vk::Device::mapMemory" );
19698 }
19699#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19700
19701 VULKAN_HPP_INLINE void Device::unmapMemory( DeviceMemory memory ) const
19702 {
19703 vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
19704 }
19705
19706 VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const
19707 {
19708 return static_cast<Result>( vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
19709 }
19710#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19711 VULKAN_HPP_INLINE ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const
19712 {
19713 Result result = static_cast<Result>( vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
19714 return createResultValue( result, "vk::Device::flushMappedMemoryRanges" );
19715 }
19716#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19717
19718 VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const MappedMemoryRange* pMemoryRanges ) const
19719 {
19720 return static_cast<Result>( vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
19721 }
19722#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19723 VULKAN_HPP_INLINE ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const MappedMemoryRange> memoryRanges ) const
19724 {
19725 Result result = static_cast<Result>( vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
19726 return createResultValue( result, "vk::Device::invalidateMappedMemoryRanges" );
19727 }
19728#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19729
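  // Editorial sketch (not generated code): flushing one mapped range through the ArrayProxy
  // overload above; a single MappedMemoryRange lvalue converts to the ArrayProxy parameter, and
  // VK_WHOLE_SIZE flushes from the offset to the end of the allocation. The example* name and
  // guard macro are editorial.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline void exampleFlushWholeAllocation( vk::Device device, vk::DeviceMemory memory )
  {
    vk::MappedMemoryRange range;
    range.memory = memory;
    range.offset = 0;
    range.size   = VK_WHOLE_SIZE;
    device.flushMappedMemoryRanges( range );
  }
#endif
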
19730 VULKAN_HPP_INLINE void Device::getMemoryCommitment( DeviceMemory memory, DeviceSize* pCommittedMemoryInBytes ) const
19731 {
19732 vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), pCommittedMemoryInBytes );
19733 }
19734#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19735 VULKAN_HPP_INLINE DeviceSize Device::getMemoryCommitment( DeviceMemory memory ) const
19736 {
19737 DeviceSize committedMemoryInBytes;
19738 vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), &committedMemoryInBytes );
19739 return committedMemoryInBytes;
19740 }
19741#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19742
19743 VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( Buffer buffer, MemoryRequirements* pMemoryRequirements ) const
19744 {
19745 vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
19746 }
19747#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19748 VULKAN_HPP_INLINE MemoryRequirements Device::getBufferMemoryRequirements( Buffer buffer ) const
19749 {
19750 MemoryRequirements memoryRequirements;
19751 vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
19752 return memoryRequirements;
19753 }
19754#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19755
19756#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19757 VULKAN_HPP_INLINE Result Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const
19758 {
19759 return static_cast<Result>( vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
19760 }
19761#else
19762 VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindBufferMemory( Buffer buffer, DeviceMemory memory, DeviceSize memoryOffset ) const
19763 {
19764 Result result = static_cast<Result>( vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
19765 return createResultValue( result, "vk::Device::bindBufferMemory" );
19766 }
19767#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19768
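  // Editorial sketch (not generated code): the usual requirements -> allocate -> bind sequence
  // for a buffer, using the enhanced overloads above. Choosing a memoryTypeIndex that is set in
  // requirements.memoryTypeBits (via the PhysicalDevice memory properties) is left to the caller.
  // Exceptions are assumed enabled; the example* name and guard macro are editorial.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline vk::DeviceMemory exampleBindBufferMemory( vk::Device device, vk::Buffer buffer, uint32_t memoryTypeIndex )
  {
    vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );
    vk::MemoryAllocateInfo allocateInfo;
    allocateInfo.allocationSize  = requirements.size;
    allocateInfo.memoryTypeIndex = memoryTypeIndex;   // must be permitted by requirements.memoryTypeBits
    vk::DeviceMemory memory = device.allocateMemory( allocateInfo );
    device.bindBufferMemory( buffer, memory, 0 );
    return memory;
  }
#endif
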
19769 VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( Image image, MemoryRequirements* pMemoryRequirements ) const
19770 {
19771 vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( pMemoryRequirements ) );
19772 }
19773#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19774 VULKAN_HPP_INLINE MemoryRequirements Device::getImageMemoryRequirements( Image image ) const
19775 {
19776 MemoryRequirements memoryRequirements;
19777 vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements*>( &memoryRequirements ) );
19778 return memoryRequirements;
19779 }
19780#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19781
19782#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19783 VULKAN_HPP_INLINE Result Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const
19784 {
19785 return static_cast<Result>( vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
19786 }
19787#else
19788 VULKAN_HPP_INLINE ResultValueType<void>::type Device::bindImageMemory( Image image, DeviceMemory memory, DeviceSize memoryOffset ) const
19789 {
19790 Result result = static_cast<Result>( vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), memoryOffset ) );
19791 return createResultValue( result, "vk::Device::bindImageMemory" );
19792 }
19793#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19794
19795 VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( Image image, uint32_t* pSparseMemoryRequirementCount, SparseImageMemoryRequirements* pSparseMemoryRequirements ) const
19796 {
19797 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( pSparseMemoryRequirements ) );
19798 }
19799#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19800 template <typename Allocator>
19801 VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements,Allocator> Device::getImageSparseMemoryRequirements( Image image ) const
19802 {
19803 std::vector<SparseImageMemoryRequirements,Allocator> sparseMemoryRequirements;
19804 uint32_t sparseMemoryRequirementCount;
19805 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
19806 sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
19807 vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements*>( sparseMemoryRequirements.data() ) );
19808 return sparseMemoryRequirements;
19809 }
19810#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19811
19812 VULKAN_HPP_INLINE Result Device::createFence( const FenceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
19813 {
19814 return static_cast<Result>( vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
19815 }
19816#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19817 VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
19818 {
19819 Fence fence;
19820 Result result = static_cast<Result>( vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkFence*>( &fence ) ) );
19821 return createResultValue( result, fence, "vk::Device::createFence" );
19822 }
19823#ifndef VULKAN_HPP_NO_SMART_HANDLE
19824 VULKAN_HPP_INLINE UniqueFence Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
19825 {
19826 FenceDeleter deleter( *this, allocator );
19827 return UniqueFence( createFence( createInfo, allocator ), deleter );
19828 }
19829#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
19830#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19831
19832 VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, const AllocationCallbacks* pAllocator ) const
19833 {
19834 vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19835 }
19836#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19837 VULKAN_HPP_INLINE void Device::destroyFence( Fence fence, Optional<const AllocationCallbacks> allocator ) const
19838 {
19839 vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19840 }
19841#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19842
19843 VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const Fence* pFences ) const
19844 {
19845 return static_cast<Result>( vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ) ) );
19846 }
19847#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19848 VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetFences( ArrayProxy<const Fence> fences ) const
19849 {
19850 Result result = static_cast<Result>( vkResetFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ) ) );
19851 return createResultValue( result, "vk::Device::resetFences" );
19852 }
19853#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19854
19855#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19856 VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence ) const
19857 {
19858 return static_cast<Result>( vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
19859 }
19860#else
19861 VULKAN_HPP_INLINE Result Device::getFenceStatus( Fence fence ) const
19862 {
19863 Result result = static_cast<Result>( vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
19864 return createResultValue( result, "vk::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
19865 }
19866#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19867
19868 VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const Fence* pFences, Bool32 waitAll, uint64_t timeout ) const
19869 {
19870 return static_cast<Result>( vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), waitAll, timeout ) );
19871 }
19872#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19873 VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const Fence> fences, Bool32 waitAll, uint64_t timeout ) const
19874 {
19875 Result result = static_cast<Result>( vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), waitAll, timeout ) );
19876 return createResultValue( result, "vk::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
19877 }
19878#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19879
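  // Editorial sketch (not generated code): creating a fence and blocking on it with the enhanced
  // overloads above. In enhanced mode waitForFences returns Result::eSuccess or Result::eTimeout
  // rather than throwing for a timeout. The one-second timeout is a placeholder; the example*
  // name and guard macro are editorial, and exceptions are assumed enabled.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline bool exampleWaitOnFence( vk::Device device )
  {
    vk::Fence fence = device.createFence( vk::FenceCreateInfo() );
    // ... submit work that signals 'fence' ...
    vk::Result result = device.waitForFences( fence, VK_TRUE, 1000000000ull );   // timeout in nanoseconds
    device.destroyFence( fence );
    return result == vk::Result::eSuccess;
  }
#endif
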
19880 VULKAN_HPP_INLINE Result Device::createSemaphore( const SemaphoreCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Semaphore* pSemaphore ) const
19881 {
19882 return static_cast<Result>( vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
19883 }
19884#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19885 VULKAN_HPP_INLINE ResultValueType<Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
19886 {
19887 Semaphore semaphore;
19888 Result result = static_cast<Result>( vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
19889 return createResultValue( result, semaphore, "vk::Device::createSemaphore" );
19890 }
19891#ifndef VULKAN_HPP_NO_SMART_HANDLE
19892 VULKAN_HPP_INLINE UniqueSemaphore Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
19893 {
19894 SemaphoreDeleter deleter( *this, allocator );
19895 return UniqueSemaphore( createSemaphore( createInfo, allocator ), deleter );
19896 }
19897#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
19898#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19899
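  // Editorial sketch (not generated code): the *Unique variant declared above returns a smart
  // handle, so no explicit destroySemaphore call is needed. The example* name and guard macro
  // are editorial.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_SMART_HANDLE )
  inline void exampleScopedSemaphore( vk::Device device )
  {
    vk::UniqueSemaphore semaphore = device.createSemaphoreUnique( vk::SemaphoreCreateInfo() );
    // ... hand *semaphore to a queue submission ...
  }                                                            // SemaphoreDeleter::operator() destroys it here
#endif
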
19900 VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, const AllocationCallbacks* pAllocator ) const
19901 {
19902 vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19903 }
19904#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19905 VULKAN_HPP_INLINE void Device::destroySemaphore( Semaphore semaphore, Optional<const AllocationCallbacks> allocator ) const
19906 {
19907 vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19908 }
19909#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19910
19911 VULKAN_HPP_INLINE Result Device::createEvent( const EventCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Event* pEvent ) const
19912 {
19913 return static_cast<Result>( vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
19914 }
19915#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19916 VULKAN_HPP_INLINE ResultValueType<Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
19917 {
19918 Event event;
19919 Result result = static_cast<Result>( vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkEvent*>( &event ) ) );
19920 return createResultValue( result, event, "vk::Device::createEvent" );
19921 }
19922#ifndef VULKAN_HPP_NO_SMART_HANDLE
19923 VULKAN_HPP_INLINE UniqueEvent Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
19924 {
19925 EventDeleter deleter( *this, allocator );
19926 return UniqueEvent( createEvent( createInfo, allocator ), deleter );
19927 }
19928#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
19929#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19930
19931 VULKAN_HPP_INLINE void Device::destroyEvent( Event event, const AllocationCallbacks* pAllocator ) const
19932 {
19933 vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
19934 }
19935#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19936 VULKAN_HPP_INLINE void Device::destroyEvent( Event event, Optional<const AllocationCallbacks> allocator ) const
19937 {
19938 vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
19939 }
19940#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19941
19942#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19943 VULKAN_HPP_INLINE Result Device::getEventStatus( Event event ) const
19944 {
19945 return static_cast<Result>( vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
19946 }
19947#else
19948 VULKAN_HPP_INLINE Result Device::getEventStatus( Event event ) const
19949 {
19950 Result result = static_cast<Result>( vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
19951 return createResultValue( result, "vk::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
19952 }
19953#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19954
19955#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19956 VULKAN_HPP_INLINE Result Device::setEvent( Event event ) const
19957 {
19958 return static_cast<Result>( vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
19959 }
19960#else
19961 VULKAN_HPP_INLINE ResultValueType<void>::type Device::setEvent( Event event ) const
19962 {
19963 Result result = static_cast<Result>( vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
19964 return createResultValue( result, "vk::Device::setEvent" );
19965 }
19966#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19967
19968#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
19969 VULKAN_HPP_INLINE Result Device::resetEvent( Event event ) const
19970 {
19971 return static_cast<Result>( vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
19972 }
19973#else
19974 VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetEvent( Event event ) const
19975 {
19976 Result result = static_cast<Result>( vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
19977 return createResultValue( result, "vk::Device::resetEvent" );
19978 }
19979#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
19980
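  // Editorial sketch (not generated code): host-side event signalling with the enhanced
  // overloads above; getEventStatus reports Result::eEventSet or Result::eEventReset instead of
  // throwing. The example* name and guard macro are editorial, and exceptions are assumed enabled.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline void exampleToggleEvent( vk::Device device )
  {
    vk::Event event = device.createEvent( vk::EventCreateInfo() );
    device.setEvent( event );
    bool signalled = ( device.getEventStatus( event ) == vk::Result::eEventSet );
    device.resetEvent( event );
    device.destroyEvent( event );
    (void) signalled;
  }
#endif
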
19981 VULKAN_HPP_INLINE Result Device::createQueryPool( const QueryPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, QueryPool* pQueryPool ) const
19982 {
19983 return static_cast<Result>( vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
19984 }
19985#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
19986 VULKAN_HPP_INLINE ResultValueType<QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
19987 {
19988 QueryPool queryPool;
19989 Result result = static_cast<Result>( vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
19990 return createResultValue( result, queryPool, "vk::Device::createQueryPool" );
19991 }
19992#ifndef VULKAN_HPP_NO_SMART_HANDLE
19993 VULKAN_HPP_INLINE UniqueQueryPool Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
19994 {
19995 QueryPoolDeleter deleter( *this, allocator );
19996 return UniqueQueryPool( createQueryPool( createInfo, allocator ), deleter );
19997 }
19998#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
19999#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20000
20001 VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, const AllocationCallbacks* pAllocator ) const
20002 {
20003 vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20004 }
20005#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20006 VULKAN_HPP_INLINE void Device::destroyQueryPool( QueryPool queryPool, Optional<const AllocationCallbacks> allocator ) const
20007 {
20008 vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20009 }
20010#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20011
20012 VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, DeviceSize stride, QueryResultFlags flags ) const
20013 {
20014 return static_cast<Result>( vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, stride, static_cast<VkQueryResultFlags>( flags ) ) );
20015 }
20016#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20017 template <typename T>
20018 VULKAN_HPP_INLINE Result Device::getQueryPoolResults( QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, DeviceSize stride, QueryResultFlags flags ) const
20019 {
20020 Result result = static_cast<Result>( vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), stride, static_cast<VkQueryResultFlags>( flags ) ) );
20021 return createResultValue( result, "vk::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
20022 }
20023#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20024
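  // Editorial sketch (not generated code): creating a small timestamp query pool and reading
  // 64-bit results back through the ArrayProxy overload above. The query count of 2 is a
  // placeholder, the timestamps must have been written and submitted before the read, and the
  // example* name and guard macro are editorial; exceptions are assumed enabled.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline bool exampleReadTimestamps( vk::Device device )
  {
    vk::QueryPoolCreateInfo createInfo;
    createInfo.queryType  = vk::QueryType::eTimestamp;
    createInfo.queryCount = 2;                                 // placeholder
    vk::QueryPool queryPool = device.createQueryPool( createInfo );
    // ... record vkCmdWriteTimestamp for both queries into a command buffer and submit it ...
    std::array<uint64_t, 2> timestamps;
    vk::Result result = device.getQueryPoolResults( queryPool, 0, 2, vk::ArrayProxy<uint64_t>( timestamps ),
                                                    sizeof( uint64_t ), vk::QueryResultFlagBits::e64 );
    device.destroyQueryPool( queryPool );
    return result == vk::Result::eSuccess;                     // eNotReady if the queries have not completed
  }
#endif
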
20025 VULKAN_HPP_INLINE Result Device::createBuffer( const BufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Buffer* pBuffer ) const
20026 {
20027 return static_cast<Result>( vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
20028 }
20029#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20030 VULKAN_HPP_INLINE ResultValueType<Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20031 {
20032 Buffer buffer;
20033 Result result = static_cast<Result>( vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkBuffer*>( &buffer ) ) );
20034 return createResultValue( result, buffer, "vk::Device::createBuffer" );
20035 }
20036#ifndef VULKAN_HPP_NO_SMART_HANDLE
20037 VULKAN_HPP_INLINE UniqueBuffer Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20038 {
20039 BufferDeleter deleter( *this, allocator );
20040 return UniqueBuffer( createBuffer( createInfo, allocator ), deleter );
20041 }
20042#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20043#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20044
20045 VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, const AllocationCallbacks* pAllocator ) const
20046 {
20047 vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20048 }
20049#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20050 VULKAN_HPP_INLINE void Device::destroyBuffer( Buffer buffer, Optional<const AllocationCallbacks> allocator ) const
20051 {
20052 vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20053 }
20054#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20055
20056 VULKAN_HPP_INLINE Result Device::createBufferView( const BufferViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, BufferView* pView ) const
20057 {
20058 return static_cast<Result>( vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
20059 }
20060#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20061 VULKAN_HPP_INLINE ResultValueType<BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20062 {
20063 BufferView view;
20064 Result result = static_cast<Result>( vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkBufferView*>( &view ) ) );
20065 return createResultValue( result, view, "vk::Device::createBufferView" );
20066 }
20067#ifndef VULKAN_HPP_NO_SMART_HANDLE
20068 VULKAN_HPP_INLINE UniqueBufferView Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20069 {
20070 BufferViewDeleter deleter( *this, allocator );
20071 return UniqueBufferView( createBufferView( createInfo, allocator ), deleter );
20072 }
20073#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20074#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20075
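  // Editorial sketch (not generated code): creating a buffer intended for texel access and a
  // BufferView over it with the enhanced overloads above. The size and format are placeholders,
  // and in real use the buffer must be bound to memory (see the bind sketch earlier) before the
  // view is used. The example* name and guard macro are editorial; exceptions are assumed enabled.
#if defined( VULKAN_HPP_ENABLE_USAGE_SKETCHES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  inline vk::BufferView exampleCreateTexelBufferView( vk::Device device )
  {
    vk::BufferCreateInfo bufferInfo;
    bufferInfo.size  = 4096;                                   // placeholder size in bytes
    bufferInfo.usage = vk::BufferUsageFlagBits::eUniformTexelBuffer;
    vk::Buffer buffer = device.createBuffer( bufferInfo );
    // ... allocate and bind memory for 'buffer' ...
    vk::BufferViewCreateInfo viewInfo;
    viewInfo.buffer = buffer;
    viewInfo.format = vk::Format::eR32Sfloat;                  // placeholder texel format
    viewInfo.range  = VK_WHOLE_SIZE;
    return device.createBufferView( viewInfo );
  }
#endif
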
20076 VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, const AllocationCallbacks* pAllocator ) const
20077 {
20078 vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20079 }
20080#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20081 VULKAN_HPP_INLINE void Device::destroyBufferView( BufferView bufferView, Optional<const AllocationCallbacks> allocator ) const
20082 {
20083 vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20084 }
20085#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20086
20087 VULKAN_HPP_INLINE Result Device::createImage( const ImageCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Image* pImage ) const
20088 {
20089 return static_cast<Result>( vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
20090 }
20091#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20092 VULKAN_HPP_INLINE ResultValueType<Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20093 {
20094 Image image;
20095 Result result = static_cast<Result>( vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkImage*>( &image ) ) );
20096 return createResultValue( result, image, "vk::Device::createImage" );
20097 }
20098#ifndef VULKAN_HPP_NO_SMART_HANDLE
20099 VULKAN_HPP_INLINE UniqueImage Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20100 {
20101 ImageDeleter deleter( *this, allocator );
20102 return UniqueImage( createImage( createInfo, allocator ), deleter );
20103 }
20104#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20105#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20106
20107 VULKAN_HPP_INLINE void Device::destroyImage( Image image, const AllocationCallbacks* pAllocator ) const
20108 {
20109 vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20110 }
20111#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20112 VULKAN_HPP_INLINE void Device::destroyImage( Image image, Optional<const AllocationCallbacks> allocator ) const
20113 {
20114 vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20115 }
20116#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20117
20118 VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( Image image, const ImageSubresource* pSubresource, SubresourceLayout* pLayout ) const
20119 {
20120 vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( pSubresource ), reinterpret_cast<VkSubresourceLayout*>( pLayout ) );
20121 }
20122#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20123 VULKAN_HPP_INLINE SubresourceLayout Device::getImageSubresourceLayout( Image image, const ImageSubresource & subresource ) const
20124 {
20125 SubresourceLayout layout;
20126 vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource*>( &subresource ), reinterpret_cast<VkSubresourceLayout*>( &layout ) );
20127 return layout;
20128 }
20129#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
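  // Usage sketch for the enhanced getImageSubresourceLayout overload above (illustrative; assumes
  // `device` and a linearly tiled `image` are valid handles created elsewhere):
  //
  //   vk::SubresourceLayout layout =
  //       device.getImageSubresourceLayout( image, vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) );
  //   // layout.offset and layout.rowPitch can then be used to address the mapped memory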
20130
20131 VULKAN_HPP_INLINE Result Device::createImageView( const ImageViewCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ImageView* pView ) const
20132 {
20133 return static_cast<Result>( vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
20134 }
20135#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20136 VULKAN_HPP_INLINE ResultValueType<ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20137 {
20138 ImageView view;
20139 Result result = static_cast<Result>( vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkImageView*>( &view ) ) );
20140 return createResultValue( result, view, "vk::Device::createImageView" );
20141 }
20142#ifndef VULKAN_HPP_NO_SMART_HANDLE
20143 VULKAN_HPP_INLINE UniqueImageView Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20144 {
20145 ImageViewDeleter deleter( *this, allocator );
20146 return UniqueImageView( createImageView( createInfo, allocator ), deleter );
20147 }
20148#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20149#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20150
20151 VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, const AllocationCallbacks* pAllocator ) const
20152 {
20153 vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20154 }
20155#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20156 VULKAN_HPP_INLINE void Device::destroyImageView( ImageView imageView, Optional<const AllocationCallbacks> allocator ) const
20157 {
20158 vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20159 }
20160#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20161
20162 VULKAN_HPP_INLINE Result Device::createShaderModule( const ShaderModuleCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, ShaderModule* pShaderModule ) const
20163 {
20164 return static_cast<Result>( vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
20165 }
20166#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20167 VULKAN_HPP_INLINE ResultValueType<ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20168 {
20169 ShaderModule shaderModule;
20170 Result result = static_cast<Result>( vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
20171 return createResultValue( result, shaderModule, "vk::Device::createShaderModule" );
20172 }
20173#ifndef VULKAN_HPP_NO_SMART_HANDLE
20174 VULKAN_HPP_INLINE UniqueShaderModule Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20175 {
20176 ShaderModuleDeleter deleter( *this, allocator );
20177 return UniqueShaderModule( createShaderModule( createInfo, allocator ), deleter );
20178 }
20179#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20180#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
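  // Usage sketch for the createShaderModule overloads above (illustrative; `spirv` stands for a
  // std::vector<uint32_t> holding a SPIR-V binary loaded elsewhere; codeSize is in bytes):
  //
  //   vk::ShaderModuleCreateInfo smci( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
  //   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( smci );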
20181
20182 VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, const AllocationCallbacks* pAllocator ) const
20183 {
20184 vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20185 }
20186#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20187 VULKAN_HPP_INLINE void Device::destroyShaderModule( ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator ) const
20188 {
20189 vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20190 }
20191#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20192
20193 VULKAN_HPP_INLINE Result Device::createPipelineCache( const PipelineCacheCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineCache* pPipelineCache ) const
20194 {
20195 return static_cast<Result>( vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
20196 }
20197#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20198 VULKAN_HPP_INLINE ResultValueType<PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20199 {
20200 PipelineCache pipelineCache;
20201 Result result = static_cast<Result>( vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
20202 return createResultValue( result, pipelineCache, "vk::Device::createPipelineCache" );
20203 }
20204#ifndef VULKAN_HPP_NO_SMART_HANDLE
20205 VULKAN_HPP_INLINE UniquePipelineCache Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20206 {
20207 PipelineCacheDeleter deleter( *this, allocator );
20208 return UniquePipelineCache( createPipelineCache( createInfo, allocator ), deleter );
20209 }
20210#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20211#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20212
20213 VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, const AllocationCallbacks* pAllocator ) const
20214 {
20215 vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20216 }
20217#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20218 VULKAN_HPP_INLINE void Device::destroyPipelineCache( PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator ) const
20219 {
20220 vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20221 }
20222#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20223
20224 VULKAN_HPP_INLINE Result Device::getPipelineCacheData( PipelineCache pipelineCache, size_t* pDataSize, void* pData ) const
20225 {
20226 return static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
20227 }
20228#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20229 template <typename Allocator>
20230 VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t,Allocator>>::type Device::getPipelineCacheData( PipelineCache pipelineCache ) const
20231 {
20232 std::vector<uint8_t,Allocator> data;
20233 size_t dataSize;
20234 Result result;
20235 do
20236 {
20237 result = static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
20238 if ( ( result == Result::eSuccess ) && dataSize )
20239 {
20240 data.resize( dataSize );
20241 result = static_cast<Result>( vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void*>( data.data() ) ) );
20242 }
20243 } while ( result == Result::eIncomplete );
20244 assert( dataSize <= data.size() );
20245 data.resize( dataSize );
20246 return createResultValue( result, data, "vk::Device::getPipelineCacheData" );
20247 }
20248#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
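  // The enhanced getPipelineCacheData overload above wraps the usual Vulkan two-call idiom: query the
  // size with a null pointer, resize, fetch, and repeat while VK_INCOMPLETE is returned. Caller-side
  // sketch (assumes `device` and `pipelineCache` are valid):
  //
  //   std::vector<uint8_t> blob = device.getPipelineCacheData( pipelineCache );
  //   // blob can be persisted and fed back via PipelineCacheCreateInfo::pInitialData on the next run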
20249
20250 VULKAN_HPP_INLINE Result Device::mergePipelineCaches( PipelineCache dstCache, uint32_t srcCacheCount, const PipelineCache* pSrcCaches ) const
20251 {
20252 return static_cast<Result>( vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
20253 }
20254#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20255 VULKAN_HPP_INLINE ResultValueType<void>::type Device::mergePipelineCaches( PipelineCache dstCache, ArrayProxy<const PipelineCache> srcCaches ) const
20256 {
20257 Result result = static_cast<Result>( vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
20258 return createResultValue( result, "vk::Device::mergePipelineCaches" );
20259 }
20260#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
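  // Sketch for the enhanced mergePipelineCaches overload above; ArrayProxy accepts an initializer list,
  // so several source caches can be merged in one call (assumes `dstCache`, `cacheA`, `cacheB` are valid):
  //
  //   device.mergePipelineCaches( dstCache, { cacheA, cacheB } );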
20261
20262 VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const GraphicsPipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const
20263 {
20264 return static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
20265 }
20266#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20267 template <typename Allocator>
20268 VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator ) const
20269 {
20270 std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
20271 Result result = static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
20272 return createResultValue( result, pipelines, "vk::Device::createGraphicsPipelines" );
20273 }
20274 VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createGraphicsPipeline( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20275 {
20276 Pipeline pipeline;
20277 Result result = static_cast<Result>( vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
20278 return createResultValue( result, pipeline, "vk::Device::createGraphicsPipeline" );
20279 }
20280#ifndef VULKAN_HPP_NO_SMART_HANDLE
20281 template <typename Allocator>
20282 VULKAN_HPP_INLINE std::vector<UniquePipeline> Device::createGraphicsPipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator ) const
20283 {
20284 PipelineDeleter deleter( *this, allocator );
20285 std::vector<Pipeline,Allocator> pipelines = createGraphicsPipelines( pipelineCache, createInfos, allocator );
20286 std::vector<UniquePipeline> uniquePipelines;
20287 uniquePipelines.reserve( pipelines.size() );
20288 for ( auto pipeline : pipelines )
20289 {
20290 uniquePipelines.push_back( UniquePipeline( pipeline, deleter ) );
20291 }
20292 return uniquePipelines;
20293 }
20294 VULKAN_HPP_INLINE UniquePipeline Device::createGraphicsPipelineUnique( PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20295 {
20296 PipelineDeleter deleter( *this, allocator );
20297 return UniquePipeline( createGraphicsPipeline( pipelineCache, createInfo, allocator ), deleter );
20298 }
20299#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20300#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
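  // Sketch for the enhanced graphics-pipeline creation overloads above (illustrative; `pipelineCache`
  // may be a null vk::PipelineCache(), and `createInfo` is a fully populated vk::GraphicsPipelineCreateInfo):
  //
  //   vk::Pipeline pipeline    = device.createGraphicsPipeline( pipelineCache, createInfo );
  //   vk::UniquePipeline owned = device.createGraphicsPipelineUnique( pipelineCache, createInfo );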
20301
20302 VULKAN_HPP_INLINE Result Device::createComputePipelines( PipelineCache pipelineCache, uint32_t createInfoCount, const ComputePipelineCreateInfo* pCreateInfos, const AllocationCallbacks* pAllocator, Pipeline* pPipelines ) const
20303 {
20304 return static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
20305 }
20306#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20307 template <typename Allocator>
20308 VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator ) const
20309 {
20310 std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
20311 Result result = static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
20312 return createResultValue( result, pipelines, "vk::Device::createComputePipelines" );
20313 }
20314 VULKAN_HPP_INLINE ResultValueType<Pipeline>::type Device::createComputePipeline( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20315 {
20316 Pipeline pipeline;
20317 Result result = static_cast<Result>( vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
20318 return createResultValue( result, pipeline, "vk::Device::createComputePipeline" );
20319 }
20320#ifndef VULKAN_HPP_NO_SMART_HANDLE
20321 template <typename Allocator>
20322 VULKAN_HPP_INLINE std::vector<UniquePipeline> Device::createComputePipelinesUnique( PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator ) const
20323 {
20324 PipelineDeleter deleter( *this, allocator );
20325 std::vector<Pipeline,Allocator> pipelines = createComputePipelines( pipelineCache, createInfos, allocator );
20326 std::vector<UniquePipeline> uniquePipelines;
20327 uniquePipelines.reserve( pipelines.size() );
20328 for ( auto pipeline : pipelines )
20329 {
20330 uniquePipelines.push_back( UniquePipeline( pipeline, deleter ) );
20331 }
20332 return uniquePipelines;
20333 }
20334 VULKAN_HPP_INLINE UniquePipeline Device::createComputePipelineUnique( PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20335 {
20336 PipelineDeleter deleter( *this, allocator );
20337 return UniquePipeline( createComputePipeline( pipelineCache, createInfo, allocator ), deleter );
20338 }
20339#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20340#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20341
20342 VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, const AllocationCallbacks* pAllocator ) const
20343 {
20344 vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20345 }
20346#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20347 VULKAN_HPP_INLINE void Device::destroyPipeline( Pipeline pipeline, Optional<const AllocationCallbacks> allocator ) const
20348 {
20349 vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20350 }
20351#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20352
20353 VULKAN_HPP_INLINE Result Device::createPipelineLayout( const PipelineLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, PipelineLayout* pPipelineLayout ) const
20354 {
20355 return static_cast<Result>( vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
20356 }
20357#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20358 VULKAN_HPP_INLINE ResultValueType<PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20359 {
20360 PipelineLayout pipelineLayout;
20361 Result result = static_cast<Result>( vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
20362 return createResultValue( result, pipelineLayout, "vk::Device::createPipelineLayout" );
20363 }
20364#ifndef VULKAN_HPP_NO_SMART_HANDLE
20365 VULKAN_HPP_INLINE UniquePipelineLayout Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20366 {
20367 PipelineLayoutDeleter deleter( *this, allocator );
20368 return UniquePipelineLayout( createPipelineLayout( createInfo, allocator ), deleter );
20369 }
20370#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20371#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20372
20373 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, const AllocationCallbacks* pAllocator ) const
20374 {
20375 vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20376 }
20377#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20378 VULKAN_HPP_INLINE void Device::destroyPipelineLayout( PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator ) const
20379 {
20380 vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20381 }
20382#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20383
20384 VULKAN_HPP_INLINE Result Device::createSampler( const SamplerCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Sampler* pSampler ) const
20385 {
20386 return static_cast<Result>( vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
20387 }
20388#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20389 VULKAN_HPP_INLINE ResultValueType<Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20390 {
20391 Sampler sampler;
20392 Result result = static_cast<Result>( vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSampler*>( &sampler ) ) );
20393 return createResultValue( result, sampler, "vk::Device::createSampler" );
20394 }
20395#ifndef VULKAN_HPP_NO_SMART_HANDLE
20396 VULKAN_HPP_INLINE UniqueSampler Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20397 {
20398 SamplerDeleter deleter( *this, allocator );
20399 return UniqueSampler( createSampler( createInfo, allocator ), deleter );
20400 }
20401#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20402#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20403
20404 VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, const AllocationCallbacks* pAllocator ) const
20405 {
20406 vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20407 }
20408#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20409 VULKAN_HPP_INLINE void Device::destroySampler( Sampler sampler, Optional<const AllocationCallbacks> allocator ) const
20410 {
20411 vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20412 }
20413#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20414
20415 VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorSetLayout* pSetLayout ) const
20416 {
20417 return static_cast<Result>( vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
20418 }
20419#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20420 VULKAN_HPP_INLINE ResultValueType<DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20421 {
20422 DescriptorSetLayout setLayout;
20423 Result result = static_cast<Result>( vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
20424 return createResultValue( result, setLayout, "vk::Device::createDescriptorSetLayout" );
20425 }
20426#ifndef VULKAN_HPP_NO_SMART_HANDLE
20427 VULKAN_HPP_INLINE UniqueDescriptorSetLayout Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20428 {
20429 DescriptorSetLayoutDeleter deleter( *this, allocator );
20430 return UniqueDescriptorSetLayout( createDescriptorSetLayout( createInfo, allocator ), deleter );
20431 }
20432#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20433#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20434
20435 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, const AllocationCallbacks* pAllocator ) const
20436 {
20437 vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20438 }
20439#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20440 VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator ) const
20441 {
20442 vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20443 }
20444#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20445
20446 VULKAN_HPP_INLINE Result Device::createDescriptorPool( const DescriptorPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, DescriptorPool* pDescriptorPool ) const
20447 {
20448 return static_cast<Result>( vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
20449 }
20450#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20451 VULKAN_HPP_INLINE ResultValueType<DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20452 {
20453 DescriptorPool descriptorPool;
20454 Result result = static_cast<Result>( vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
20455 return createResultValue( result, descriptorPool, "vk::Device::createDescriptorPool" );
20456 }
20457#ifndef VULKAN_HPP_NO_SMART_HANDLE
20458 VULKAN_HPP_INLINE UniqueDescriptorPool Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20459 {
20460 DescriptorPoolDeleter deleter( *this, allocator );
20461 return UniqueDescriptorPool( createDescriptorPool( createInfo, allocator ), deleter );
20462 }
20463#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20464#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20465
20466 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, const AllocationCallbacks* pAllocator ) const
20467 {
20468 vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20469 }
20470#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20471 VULKAN_HPP_INLINE void Device::destroyDescriptorPool( DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator ) const
20472 {
20473 vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20474 }
20475#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20476
20477#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
20478 VULKAN_HPP_INLINE Result Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags ) const
20479 {
20480 return static_cast<Result>( vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
20481 }
20482#else
20483 VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetDescriptorPool( DescriptorPool descriptorPool, DescriptorPoolResetFlags flags ) const
20484 {
20485 Result result = static_cast<Result>( vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
20486 return createResultValue( result, "vk::Device::resetDescriptorPool" );
20487 }
20488#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20489
20490 VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const DescriptorSetAllocateInfo* pAllocateInfo, DescriptorSet* pDescriptorSets ) const
20491 {
20492 return static_cast<Result>( vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
20493 }
20494#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20495 template <typename Allocator>
20496 VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo ) const
20497 {
20498 std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
20499 Result result = static_cast<Result>( vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
20500 return createResultValue( result, descriptorSets, "vk::Device::allocateDescriptorSets" );
20501 }
20502#ifndef VULKAN_HPP_NO_SMART_HANDLE
20503 template <typename Allocator>
20504 VULKAN_HPP_INLINE std::vector<UniqueDescriptorSet> Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo ) const
20505 {
20506 DescriptorSetDeleter deleter( *this, allocateInfo.descriptorPool );
20507 std::vector<DescriptorSet,Allocator> descriptorSets = allocateDescriptorSets( allocateInfo );
20508 std::vector<UniqueDescriptorSet> uniqueDescriptorSets;
20509 uniqueDescriptorSets.reserve( descriptorSets.size() );
20510 for ( auto descriptorSet : descriptorSets )
20511 {
20512 uniqueDescriptorSets.push_back( UniqueDescriptorSet( descriptorSet, deleter ) );
20513 }
20514 return uniqueDescriptorSets;
20515 }
20516#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20517#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
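  // Sketch for the enhanced allocateDescriptorSets overloads above (illustrative; `pool` and `layout`
  // are assumed valid). Note the Unique variant's deleter frees sets back to the pool individually,
  // which requires the pool to have been created with DescriptorPoolCreateFlagBits::eFreeDescriptorSet:
  //
  //   vk::DescriptorSetAllocateInfo info( pool, 1, &layout );
  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( info );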
20518
20519 VULKAN_HPP_INLINE Result Device::freeDescriptorSets( DescriptorPool descriptorPool, uint32_t descriptorSetCount, const DescriptorSet* pDescriptorSets ) const
20520 {
20521 return static_cast<Result>( vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
20522 }
20523#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20524 VULKAN_HPP_INLINE ResultValueType<void>::type Device::freeDescriptorSets( DescriptorPool descriptorPool, ArrayProxy<const DescriptorSet> descriptorSets ) const
20525 {
20526 Result result = static_cast<Result>( vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
20527 return createResultValue( result, "vk::Device::freeDescriptorSets" );
20528 }
20529#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20530
20531 VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const CopyDescriptorSet* pDescriptorCopies ) const
20532 {
20533 vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
20534 }
20535#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20536 VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const WriteDescriptorSet> descriptorWrites, ArrayProxy<const CopyDescriptorSet> descriptorCopies ) const
20537 {
20538 vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
20539 }
20540#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
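  // Sketch for the enhanced updateDescriptorSets overload above (illustrative; `write` is a populated
  // vk::WriteDescriptorSet, and nullptr is passed for the copy proxy since no copies are performed):
  //
  //   device.updateDescriptorSets( write, nullptr );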
20541
20542 VULKAN_HPP_INLINE Result Device::createFramebuffer( const FramebufferCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Framebuffer* pFramebuffer ) const
20543 {
20544 return static_cast<Result>( vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
20545 }
20546#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20547 VULKAN_HPP_INLINE ResultValueType<Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20548 {
20549 Framebuffer framebuffer;
20550 Result result = static_cast<Result>( vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
20551 return createResultValue( result, framebuffer, "vk::Device::createFramebuffer" );
20552 }
20553#ifndef VULKAN_HPP_NO_SMART_HANDLE
20554 VULKAN_HPP_INLINE UniqueFramebuffer Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20555 {
20556 FramebufferDeleter deleter( *this, allocator );
20557 return UniqueFramebuffer( createFramebuffer( createInfo, allocator ), deleter );
20558 }
20559#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20560#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20561
20562 VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, const AllocationCallbacks* pAllocator ) const
20563 {
20564 vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20565 }
20566#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20567 VULKAN_HPP_INLINE void Device::destroyFramebuffer( Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator ) const
20568 {
20569 vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20570 }
20571#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20572
20573 VULKAN_HPP_INLINE Result Device::createRenderPass( const RenderPassCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, RenderPass* pRenderPass ) const
20574 {
20575 return static_cast<Result>( vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
20576 }
20577#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20578 VULKAN_HPP_INLINE ResultValueType<RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20579 {
20580 RenderPass renderPass;
20581 Result result = static_cast<Result>( vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
20582 return createResultValue( result, renderPass, "vk::Device::createRenderPass" );
20583 }
20584#ifndef VULKAN_HPP_NO_SMART_HANDLE
20585 VULKAN_HPP_INLINE UniqueRenderPass Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20586 {
20587 RenderPassDeleter deleter( *this, allocator );
20588 return UniqueRenderPass( createRenderPass( createInfo, allocator ), deleter );
20589 }
20590#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20591#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20592
20593 VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, const AllocationCallbacks* pAllocator ) const
20594 {
20595 vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20596 }
20597#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20598 VULKAN_HPP_INLINE void Device::destroyRenderPass( RenderPass renderPass, Optional<const AllocationCallbacks> allocator ) const
20599 {
20600 vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20601 }
20602#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20603
20604 VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( RenderPass renderPass, Extent2D* pGranularity ) const
20605 {
20606 vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( pGranularity ) );
20607 }
20608#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20609 VULKAN_HPP_INLINE Extent2D Device::getRenderAreaGranularity( RenderPass renderPass ) const
20610 {
20611 Extent2D granularity;
20612 vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D*>( &granularity ) );
20613 return granularity;
20614 }
20615#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20616
20617 VULKAN_HPP_INLINE Result Device::createCommandPool( const CommandPoolCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, CommandPool* pCommandPool ) const
20618 {
20619 return static_cast<Result>( vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
20620 }
20621#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20622 VULKAN_HPP_INLINE ResultValueType<CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20623 {
20624 CommandPool commandPool;
20625 Result result = static_cast<Result>( vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
20626 return createResultValue( result, commandPool, "vk::Device::createCommandPool" );
20627 }
20628#ifndef VULKAN_HPP_NO_SMART_HANDLE
20629 VULKAN_HPP_INLINE UniqueCommandPool Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
20630 {
20631 CommandPoolDeleter deleter( *this, allocator );
20632 return UniqueCommandPool( createCommandPool( createInfo, allocator ), deleter );
20633 }
20634#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20635#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20636
20637 VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, const AllocationCallbacks* pAllocator ) const
20638 {
20639 vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20640 }
20641#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20642 VULKAN_HPP_INLINE void Device::destroyCommandPool( CommandPool commandPool, Optional<const AllocationCallbacks> allocator ) const
20643 {
20644 vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20645 }
20646#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20647
20648#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
20649 VULKAN_HPP_INLINE Result Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const
20650 {
20651 return static_cast<Result>( vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
20652 }
20653#else
20654 VULKAN_HPP_INLINE ResultValueType<void>::type Device::resetCommandPool( CommandPool commandPool, CommandPoolResetFlags flags ) const
20655 {
20656 Result result = static_cast<Result>( vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
20657 return createResultValue( result, "vk::Device::resetCommandPool" );
20658 }
20659#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20660
20661 VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const CommandBufferAllocateInfo* pAllocateInfo, CommandBuffer* pCommandBuffers ) const
20662 {
20663 return static_cast<Result>( vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
20664 }
20665#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20666 template <typename Allocator>
20667 VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo ) const
20668 {
20669 std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
20670 Result result = static_cast<Result>( vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
20671 return createResultValue( result, commandBuffers, "vk::Device::allocateCommandBuffers" );
20672 }
20673#ifndef VULKAN_HPP_NO_SMART_HANDLE
20674 template <typename Allocator>
20675 VULKAN_HPP_INLINE std::vector<UniqueCommandBuffer> Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo ) const
20676 {
20677 CommandBufferDeleter deleter( *this, allocateInfo.commandPool );
20678 std::vector<CommandBuffer,Allocator> commandBuffers = allocateCommandBuffers( allocateInfo );
20679 std::vector<UniqueCommandBuffer> uniqueCommandBuffers;
20680 uniqueCommandBuffers.reserve( commandBuffers.size() );
20681 for ( auto commandBuffer : commandBuffers )
20682 {
20683 uniqueCommandBuffers.push_back( UniqueCommandBuffer( commandBuffer, deleter ) );
20684 }
20685 return uniqueCommandBuffers;
20686 }
20687#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20688#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
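  // Sketch for the enhanced allocateCommandBuffers overloads above (illustrative; `commandPool` is a
  // valid vk::CommandPool created for the queue family the buffers will be submitted on):
  //
  //   vk::CommandBufferAllocateInfo info( commandPool, vk::CommandBufferLevel::ePrimary, 1 );
  //   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( info );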
20689
20690 VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, uint32_t commandBufferCount, const CommandBuffer* pCommandBuffers ) const
20691 {
20692 vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
20693 }
20694#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20695 VULKAN_HPP_INLINE void Device::freeCommandBuffers( CommandPool commandPool, ArrayProxy<const CommandBuffer> commandBuffers ) const
20696 {
20697 vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
20698 }
20699#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20700
20701 VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const SwapchainCreateInfoKHR* pCreateInfos, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchains ) const
20702 {
20703 return static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
20704 }
20705#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20706 template <typename Allocator>
20707 VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator ) const
20708 {
20709 std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
20710 Result result = static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
20711 return createResultValue( result, swapchains, "vk::Device::createSharedSwapchainsKHR" );
20712 }
20713 VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
20714 {
20715 SwapchainKHR swapchain;
20716 Result result = static_cast<Result>( vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
20717 return createResultValue( result, swapchain, "vk::Device::createSharedSwapchainKHR" );
20718 }
20719#ifndef VULKAN_HPP_NO_SMART_HANDLE
20720 template <typename Allocator>
20721 VULKAN_HPP_INLINE std::vector<UniqueSwapchainKHR> Device::createSharedSwapchainsKHRUnique( ArrayProxy<const SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator ) const
20722 {
20723 SwapchainKHRDeleter deleter( *this, allocator );
20724 std::vector<SwapchainKHR,Allocator> swapchainKHRs = createSharedSwapchainsKHR( createInfos, allocator );
20725 std::vector<UniqueSwapchainKHR> uniqueSwapchainKHRs;
20726 uniqueSwapchainKHRs.reserve( swapchainKHRs.size() );
20727 for ( auto swapchainKHR : swapchainKHRs )
20728 {
20729 uniqueSwapchainKHRs.push_back( UniqueSwapchainKHR( swapchainKHR, deleter ) );
20730 }
20731 return uniqueSwapchainKHRs;
20732 }
20733 VULKAN_HPP_INLINE UniqueSwapchainKHR Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
20734 {
20735 SwapchainKHRDeleter deleter( *this, allocator );
20736 return UniqueSwapchainKHR( createSharedSwapchainKHR( createInfo, allocator ), deleter );
20737 }
20738#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20739#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20740
20741 VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const SwapchainCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SwapchainKHR* pSwapchain ) const
20742 {
20743 return static_cast<Result>( vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
20744 }
20745#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20746 VULKAN_HPP_INLINE ResultValueType<SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
20747 {
20748 SwapchainKHR swapchain;
20749 Result result = static_cast<Result>( vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
20750 return createResultValue( result, swapchain, "vk::Device::createSwapchainKHR" );
20751 }
20752#ifndef VULKAN_HPP_NO_SMART_HANDLE
20753 VULKAN_HPP_INLINE UniqueSwapchainKHR Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
20754 {
20755 SwapchainKHRDeleter deleter( *this, allocator );
20756 return UniqueSwapchainKHR( createSwapchainKHR( createInfo, allocator ), deleter );
20757 }
20758#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20759#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
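  // Sketch for the enhanced createSwapchainKHR overloads above (illustrative; `createInfo` is a
  // vk::SwapchainCreateInfoKHR filled from the surface capabilities queried on the PhysicalDevice):
  //
  //   vk::SwapchainKHR swapchain        = device.createSwapchainKHR( createInfo );
  //   vk::UniqueSwapchainKHR owned      = device.createSwapchainKHRUnique( createInfo );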
20760
20761 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, const AllocationCallbacks* pAllocator ) const
20762 {
20763 vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20764 }
20765#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20766 VULKAN_HPP_INLINE void Device::destroySwapchainKHR( SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator ) const
20767 {
20768 vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20769 }
20770#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20771
20772 VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, Image* pSwapchainImages ) const
20773 {
20774 return static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage*>( pSwapchainImages ) ) );
20775 }
20776#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20777 template <typename Allocator>
20778 VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image,Allocator>>::type Device::getSwapchainImagesKHR( SwapchainKHR swapchain ) const
20779 {
20780 std::vector<Image,Allocator> swapchainImages;
20781 uint32_t swapchainImageCount;
20782 Result result;
20783 do
20784 {
20785 result = static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
20786 if ( ( result == Result::eSuccess ) && swapchainImageCount )
20787 {
20788 swapchainImages.resize( swapchainImageCount );
20789 result = static_cast<Result>( vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage*>( swapchainImages.data() ) ) );
20790 }
20791 } while ( result == Result::eIncomplete );
20792 assert( swapchainImageCount <= swapchainImages.size() );
20793 swapchainImages.resize( swapchainImageCount );
20794 return createResultValue( result, swapchainImages, "vk::Device::getSwapchainImagesKHR" );
20795 }
20796#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
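  // Sketch for the enhanced getSwapchainImagesKHR overload above; like getPipelineCacheData it loops on
  // VK_INCOMPLETE internally, so the caller simply receives the complete vector (assumes `swapchain` is valid):
  //
  //   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );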
20797
20798 VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence, uint32_t* pImageIndex ) const
20799 {
20800 return static_cast<Result>( vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
20801 }
20802#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20803 VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( SwapchainKHR swapchain, uint64_t timeout, Semaphore semaphore, Fence fence ) const
20804 {
20805 uint32_t imageIndex;
20806 Result result = static_cast<Result>( vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
20807 return createResultValue( result, imageIndex, "vk::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
20808 }
20809#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
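  // Sketch for the enhanced acquireNextImageKHR overload above. It returns ResultValue<uint32_t> and does
  // not throw for the listed codes, so eTimeout / eNotReady / eSuboptimalKHR must be checked by the caller
  // (assumes `swapchain` and `imageAvailable` are valid; no fence is passed):
  //
  //   vk::ResultValue<uint32_t> acquired =
  //       device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, vk::Fence() );
  //   if ( acquired.result == vk::Result::eSuboptimalKHR ) { /* recreate the swapchain when convenient */ }
  //   uint32_t imageIndex = acquired.value;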
20810
20811 VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( DebugMarkerObjectNameInfoEXT* pNameInfo ) const
20812 {
20813 return static_cast<Result>( vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
20814 }
20815#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20816 VULKAN_HPP_INLINE ResultValueType<DebugMarkerObjectNameInfoEXT>::type Device::debugMarkerSetObjectNameEXT() const
20817 {
20818 DebugMarkerObjectNameInfoEXT nameInfo;
20819 Result result = static_cast<Result>( vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
20820 return createResultValue( result, nameInfo, "vk::Device::debugMarkerSetObjectNameEXT" );
20821 }
20822#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20823
20824 VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( DebugMarkerObjectTagInfoEXT* pTagInfo ) const
20825 {
20826 return static_cast<Result>( vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
20827 }
20828#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20829 VULKAN_HPP_INLINE ResultValueType<DebugMarkerObjectTagInfoEXT>::type Device::debugMarkerSetObjectTagEXT() const
20830 {
20831 DebugMarkerObjectTagInfoEXT tagInfo;
20832 Result result = static_cast<Result>( vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
20833 return createResultValue( result, tagInfo, "vk::Device::debugMarkerSetObjectTagEXT" );
20834 }
20835#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
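  // NOTE: the parameterless enhanced debugMarkerSetObjectNameEXT()/debugMarkerSetObjectTagEXT() overloads
  // above appear to result from the registry declaring these info structs as non-const pointers, which the
  // generator treats as output parameters; in practice the C-style overloads taking a pointer to a
  // filled-in DebugMarkerObjectNameInfoEXT / DebugMarkerObjectTagInfoEXT are the usable ones.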
20836
20837#ifdef VK_USE_PLATFORM_WIN32_KHR
20838 VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const
20839 {
20840 return static_cast<Result>( vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
20841 }
20842#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20843 VULKAN_HPP_INLINE ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( DeviceMemory memory, ExternalMemoryHandleTypeFlagsNV handleType ) const
20844 {
20845 HANDLE handle;
20846 Result result = static_cast<Result>( vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
20847 return createResultValue( result, handle, "vk::Device::getMemoryWin32HandleNV" );
20848 }
20849#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20850#endif /*VK_USE_PLATFORM_WIN32_KHR*/
20851
20852 VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, IndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const
20853 {
20854 return static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
20855 }
20856#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20857 VULKAN_HPP_INLINE ResultValueType<IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator ) const
20858 {
20859 IndirectCommandsLayoutNVX indirectCommandsLayout;
20860 Result result = static_cast<Result>( vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
20861 return createResultValue( result, indirectCommandsLayout, "vk::Device::createIndirectCommandsLayoutNVX" );
20862 }
20863#ifndef VULKAN_HPP_NO_SMART_HANDLE
20864 VULKAN_HPP_INLINE UniqueIndirectCommandsLayoutNVX Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator ) const
20865 {
20866 IndirectCommandsLayoutNVXDeleter deleter( *this, allocator );
20867 return UniqueIndirectCommandsLayoutNVX( createIndirectCommandsLayoutNVX( createInfo, allocator ), deleter );
20868 }
20869#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20870#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
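  // [Editorial note] Illustrative sketch, not generated code, showing the enhanced-mode / smart-handle
  // creation pattern used throughout this header. A vk::Device `device` and a filled-in create-info are
  // assumed; the allocator parameter is expected to default to nullptr in the corresponding declaration
  // earlier in this file (not shown here).
  //
  //   vk::IndirectCommandsLayoutCreateInfoNVX layoutInfo = ...;   // fill in per VK_NVX_device_generated_commands
  //   #ifndef VULKAN_HPP_NO_SMART_HANDLE
  //     vk::UniqueIndirectCommandsLayoutNVX layout =
  //       device.createIndirectCommandsLayoutNVXUnique( layoutInfo );       // destroyed automatically
  //   #else
  //     vk::IndirectCommandsLayoutNVX layout = device.createIndirectCommandsLayoutNVX( layoutInfo );
  //     // ... later: device.destroyIndirectCommandsLayoutNVX( layout );
  //   #endif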
20871
20872 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, const AllocationCallbacks* pAllocator ) const
20873 {
20874 vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20875 }
20876#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20877 VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator ) const
20878 {
20879 vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20880 }
20881#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20882
20883 VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const ObjectTableCreateInfoNVX* pCreateInfo, const AllocationCallbacks* pAllocator, ObjectTableNVX* pObjectTable ) const
20884 {
20885 return static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
20886 }
20887#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20888 VULKAN_HPP_INLINE ResultValueType<ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator ) const
20889 {
20890 ObjectTableNVX objectTable;
20891 Result result = static_cast<Result>( vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
20892 return createResultValue( result, objectTable, "vk::Device::createObjectTableNVX" );
20893 }
20894#ifndef VULKAN_HPP_NO_SMART_HANDLE
20895 VULKAN_HPP_INLINE UniqueObjectTableNVX Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator ) const
20896 {
20897 ObjectTableNVXDeleter deleter( *this, allocator );
20898 return UniqueObjectTableNVX( createObjectTableNVX( createInfo, allocator ), deleter );
20899 }
20900#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
20901#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20902
20903 VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, const AllocationCallbacks* pAllocator ) const
20904 {
20905 vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
20906 }
20907#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20908 VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator ) const
20909 {
20910 vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
20911 }
20912#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20913
20914 VULKAN_HPP_INLINE Result Device::registerObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const
20915 {
20916 return static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectTableEntryNVX* const*>( ppObjectTableEntries ), pObjectIndices ) );
20917 }
20918#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20919 VULKAN_HPP_INLINE ResultValueType<void>::type Device::registerObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices ) const
20920 {
20921#ifdef VULKAN_HPP_NO_EXCEPTIONS
20922 assert( pObjectTableEntries.size() == objectIndices.size() );
20923#else
20924 if ( pObjectTableEntries.size() != objectIndices.size() )
20925 {
20926 throw std::logic_error( "vk::Device::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" );
20927 }
20928#endif // VULKAN_HPP_NO_EXCEPTIONS
20929 Result result = static_cast<Result>( vkRegisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), pObjectTableEntries.size() , reinterpret_cast<const VkObjectTableEntryNVX* const*>( pObjectTableEntries.data() ), objectIndices.data() ) );
20930 return createResultValue( result, "vk::Device::registerObjectsNVX" );
20931 }
20932#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
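  // [Editorial note] As the implementation above shows, the ArrayProxy overload requires
  // pObjectTableEntries and objectIndices to have the same length: with exceptions enabled a mismatch
  // throws std::logic_error, otherwise it only asserts. Illustrative sketch (`device`, `objectTable` and
  // a prepared entry named `someEntry` are assumed; entry setup is omitted):
  //
  //   const vk::ObjectTableEntryNVX* entries[] = { &someEntry };
  //   uint32_t indices[] = { 0 };                                  // one index per entry
  //   device.registerObjectsNVX( objectTable,
  //                              vk::ArrayProxy<const vk::ObjectTableEntryNVX* const>( 1, entries ),
  //                              vk::ArrayProxy<const uint32_t>( 1, indices ) );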
20933
20934 VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( ObjectTableNVX objectTable, uint32_t objectCount, const ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const
20935 {
20936 return static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
20937 }
20938#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20939 VULKAN_HPP_INLINE ResultValueType<void>::type Device::unregisterObjectsNVX( ObjectTableNVX objectTable, ArrayProxy<const ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices ) const
20940 {
20941#ifdef VULKAN_HPP_NO_EXCEPTIONS
20942 assert( objectEntryTypes.size() == objectIndices.size() );
20943#else
20944 if ( objectEntryTypes.size() != objectIndices.size() )
20945 {
20946 throw std::logic_error( "vk::Device::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
20947 }
20948#endif // VULKAN_HPP_NO_EXCEPTIONS
20949 Result result = static_cast<Result>( vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
20950 return createResultValue( result, "vk::Device::unregisterObjectsNVX" );
20951 }
20952#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20953
20954 VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( CommandPool commandPool, CommandPoolTrimFlagsKHR flags ) const
20955 {
20956 vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlagsKHR>( flags ) );
20957 }
20958
20959 VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT* pDisplayPowerInfo ) const
20960 {
20961 return static_cast<Result>( vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
20962 }
20963#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20964 VULKAN_HPP_INLINE ResultValueType<void>::type Device::displayPowerControlEXT( DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo ) const
20965 {
20966 Result result = static_cast<Result>( vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
20967 return createResultValue( result, "vk::Device::displayPowerControlEXT" );
20968 }
20969#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20970
20971 VULKAN_HPP_INLINE Result Device::registerEventEXT( const DeviceEventInfoEXT* pDeviceEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
20972 {
20973 return static_cast<Result>( vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
20974 }
20975#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20976 VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, const AllocationCallbacks & allocator ) const
20977 {
20978 Fence fence;
20979 Result result = static_cast<Result>( vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( &allocator ), reinterpret_cast<VkFence*>( &fence ) ) );
20980 return createResultValue( result, fence, "vk::Device::registerEventEXT" );
20981 }
20982#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20983
20984 VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT* pDisplayEventInfo, const AllocationCallbacks* pAllocator, Fence* pFence ) const
20985 {
20986 return static_cast<Result>( vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
20987 }
20988#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
20989 VULKAN_HPP_INLINE ResultValueType<Fence>::type Device::registerDisplayEventEXT( DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, const AllocationCallbacks & allocator ) const
20990 {
20991 Fence fence;
20992 Result result = static_cast<Result>( vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT*>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( &allocator ), reinterpret_cast<VkFence*>( &fence ) ) );
20993 return createResultValue( result, fence, "vk::Device::registerDisplayEventEXT" );
20994 }
20995#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
20996
20997 VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const
20998 {
20999 return static_cast<Result>( vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
21000 }
21001#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21002 VULKAN_HPP_INLINE ResultValue<uint64_t> Device::getSwapchainCounterEXT( SwapchainKHR swapchain, SurfaceCounterFlagBitsEXT counter ) const
21003 {
21004 uint64_t counterValue;
21005 Result result = static_cast<Result>( vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
21006 return createResultValue( result, counterValue, "vk::Device::getSwapchainCounterEXT", { Result::eSuccess, Result::eErrorDeviceLost, Result::eErrorOutOfDateKHR } );
21007 }
21008#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
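  // [Editorial note] Unlike most enhanced-mode calls in this header, getSwapchainCounterEXT returns a
  // vk::ResultValue<uint64_t> rather than ResultValueType<...>::type, because the implementation above
  // passes eErrorDeviceLost and eErrorOutOfDateKHR back to the caller instead of throwing. Illustrative
  // sketch (`device` and `swapchain` assumed; eVblank is an assumed counter of interest):
  //
  //   vk::ResultValue<uint64_t> counter =
  //     device.getSwapchainCounterEXT( swapchain, vk::SurfaceCounterFlagBitsEXT::eVblank );
  //   if ( counter.result == vk::Result::eSuccess )
  //   {
  //     uint64_t vblanks = counter.value;
  //   }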
21009#ifndef VULKAN_HPP_NO_SMART_HANDLE
21010 class DeviceDeleter;
21011 using UniqueDevice = UniqueHandle<Device, DeviceDeleter>;
21012#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
21013
21014 class PhysicalDevice
21015 {
21016 public:
21017 PhysicalDevice()
21018 : m_physicalDevice(VK_NULL_HANDLE)
21019 {}
21020
21021 PhysicalDevice( std::nullptr_t )
21022 : m_physicalDevice(VK_NULL_HANDLE)
21023 {}
21024
21025#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
21026 PhysicalDevice(VkPhysicalDevice physicalDevice)
21027 : m_physicalDevice(physicalDevice)
21028 {}
21029
21030 PhysicalDevice& operator=(VkPhysicalDevice physicalDevice)
21031 {
21032 m_physicalDevice = physicalDevice;
21033 return *this;
21034 }
21035#endif
21036
21037 PhysicalDevice& operator=( std::nullptr_t )
21038 {
21039 m_physicalDevice = VK_NULL_HANDLE;
21040 return *this;
21041 }
21042
21043 bool operator==(PhysicalDevice const &rhs) const
21044 {
21045 return m_physicalDevice == rhs.m_physicalDevice;
21046 }
21047
21048 bool operator!=(PhysicalDevice const &rhs) const
21049 {
21050 return m_physicalDevice != rhs.m_physicalDevice;
21051 }
21052
21053 bool operator<(PhysicalDevice const &rhs) const
21054 {
21055 return m_physicalDevice < rhs.m_physicalDevice;
21056 }
21057
21058 void getProperties( PhysicalDeviceProperties* pProperties ) const;
21059#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21060 PhysicalDeviceProperties getProperties() const;
21061#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21062
21063 void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties ) const;
21064#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21065 template <typename Allocator = std::allocator<QueueFamilyProperties>>
21066 std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties() const;
21067#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21068
21069 void getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties ) const;
21070#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21071 PhysicalDeviceMemoryProperties getMemoryProperties() const;
21072#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21073
21074 void getFeatures( PhysicalDeviceFeatures* pFeatures ) const;
21075#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21076 PhysicalDeviceFeatures getFeatures() const;
21077#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21078
21079 void getFormatProperties( Format format, FormatProperties* pFormatProperties ) const;
21080#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21081 FormatProperties getFormatProperties( Format format ) const;
21082#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21083
21084 Result getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties ) const;
21085#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21086 ResultValueType<ImageFormatProperties>::type getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags ) const;
21087#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21088
21089 Result createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice ) const;
21090#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21091 ResultValueType<Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
21092#ifndef VULKAN_HPP_NO_SMART_HANDLE
21093 UniqueDevice createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
21094#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
21095#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21096
21097 Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties ) const;
21098#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21099 template <typename Allocator = std::allocator<LayerProperties>>
21100 typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties() const;
21101#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21102
21103 Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties ) const;
21104#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21105 template <typename Allocator = std::allocator<ExtensionProperties>>
21106 typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr ) const;
21107#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21108
21109 void getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties ) const;
21110#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21111 template <typename Allocator = std::allocator<SparseImageFormatProperties>>
21112 std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling ) const;
21113#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21114
21115 Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties ) const;
21116#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21117 template <typename Allocator = std::allocator<DisplayPropertiesKHR>>
21118 typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR() const;
21119#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21120
21121 Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties ) const;
21122#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21123 template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>>
21124 typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR() const;
21125#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21126
21127 Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays ) const;
21128#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21129 template <typename Allocator = std::allocator<DisplayKHR>>
21130 typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const;
21131#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21132
21133 Result getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties ) const;
21134#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21135 template <typename Allocator = std::allocator<DisplayModePropertiesKHR>>
21136 typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( DisplayKHR display ) const;
21137#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21138
21139 Result createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode ) const;
21140#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21141 ResultValueType<DisplayModeKHR>::type createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
21142#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21143
21144 Result getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities ) const;
21145#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21146 ResultValueType<DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex ) const;
21147#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21148
21149#ifdef VK_USE_PLATFORM_MIR_KHR
21150 Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection ) const;
21151#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21152 Bool32 getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection ) const;
21153#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21154#endif /*VK_USE_PLATFORM_MIR_KHR*/
21155
21156 Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported ) const;
21157#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21158 ResultValueType<Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface ) const;
21159#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21160
21161 Result getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities ) const;
21162#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21163 ResultValueType<SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( SurfaceKHR surface ) const;
21164#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21165
21166 Result getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats ) const;
21167#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21168 template <typename Allocator = std::allocator<SurfaceFormatKHR>>
21169 typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( SurfaceKHR surface ) const;
21170#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21171
21172 Result getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes ) const;
21173#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21174 template <typename Allocator = std::allocator<PresentModeKHR>>
21175 typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( SurfaceKHR surface ) const;
21176#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21177
21178#ifdef VK_USE_PLATFORM_WAYLAND_KHR
21179 Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display ) const;
21180#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21181 Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const;
21182#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21183#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
21184
21185#ifdef VK_USE_PLATFORM_WIN32_KHR
21186 Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const;
21187#endif /*VK_USE_PLATFORM_WIN32_KHR*/
21188
21189#ifdef VK_USE_PLATFORM_XLIB_KHR
21190 Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const;
21191#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21192 Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const;
21193#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21194#endif /*VK_USE_PLATFORM_XLIB_KHR*/
21195
21196#ifdef VK_USE_PLATFORM_XCB_KHR
21197 Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const;
21198#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21199 Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const;
21200#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21201#endif /*VK_USE_PLATFORM_XCB_KHR*/
21202
21203 Result getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const;
21204#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21205 ResultValueType<ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType ) const;
21206#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21207
21208 void getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits ) const;
21209#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21210 DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features ) const;
21211#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21212
21213 void getFeatures2KHR( PhysicalDeviceFeatures2KHR* pFeatures ) const;
21214#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21215 PhysicalDeviceFeatures2KHR getFeatures2KHR() const;
21216#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21217
21218 void getProperties2KHR( PhysicalDeviceProperties2KHR* pProperties ) const;
21219#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21220 PhysicalDeviceProperties2KHR getProperties2KHR() const;
21221#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21222
21223 void getFormatProperties2KHR( Format format, FormatProperties2KHR* pFormatProperties ) const;
21224#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21225 FormatProperties2KHR getFormatProperties2KHR( Format format ) const;
21226#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21227
21228 Result getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, ImageFormatProperties2KHR* pImageFormatProperties ) const;
21229#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21230 ResultValueType<ImageFormatProperties2KHR>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR & imageFormatInfo ) const;
21231#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21232
21233 void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2KHR* pQueueFamilyProperties ) const;
21234#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21235 template <typename Allocator = std::allocator<QueueFamilyProperties2KHR>>
21236 std::vector<QueueFamilyProperties2KHR,Allocator> getQueueFamilyProperties2KHR() const;
21237#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21238
21239 void getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2KHR* pMemoryProperties ) const;
21240#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21241 PhysicalDeviceMemoryProperties2KHR getMemoryProperties2KHR() const;
21242#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21243
21244 void getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2KHR* pProperties ) const;
21245#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21246 template <typename Allocator = std::allocator<SparseImageFormatProperties2KHR>>
21247 std::vector<SparseImageFormatProperties2KHR,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR & formatInfo ) const;
21248#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21249
21250#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
21251 Result releaseDisplayEXT( DisplayKHR display ) const;
21252#else
21253 ResultValueType<void>::type releaseDisplayEXT( DisplayKHR display ) const;
21254#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21255
21256#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
21257 Result acquireXlibDisplayEXT( Display* dpy, DisplayKHR display ) const;
21258#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21259 ResultValueType<Display>::type acquireXlibDisplayEXT( DisplayKHR display ) const;
21260#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21261#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
21262
21263#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
21264 Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay ) const;
21265#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21266 ResultValueType<DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const;
21267#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21268#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
21269
21270 Result getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities ) const;
21271#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21272 ResultValueType<SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( SurfaceKHR surface ) const;
21273#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21274
21275#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
21276 explicit
21277#endif
21278 operator VkPhysicalDevice() const
21279 {
21280 return m_physicalDevice;
21281 }
21282
21283 explicit operator bool() const
21284 {
21285 return m_physicalDevice != VK_NULL_HANDLE;
21286 }
21287
21288 bool operator!() const
21289 {
21290 return m_physicalDevice == VK_NULL_HANDLE;
21291 }
21292
21293 private:
21294 VkPhysicalDevice m_physicalDevice;
21295 };
21296 static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
21297
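  // [Editorial note] Illustrative sketch of typical PhysicalDevice queries in enhanced mode (exceptions
  // enabled), not generated code. A valid vk::PhysicalDevice named `physicalDevice` is assumed, normally
  // obtained from Instance::enumeratePhysicalDevices declared elsewhere in this header.
  //
  //   vk::PhysicalDeviceProperties       properties       = physicalDevice.getProperties();
  //   vk::PhysicalDeviceFeatures         features         = physicalDevice.getFeatures();
  //   vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties();
  //   // properties.deviceName, properties.apiVersion, features.*, memoryProperties.memoryTypeCount, ...
  //   // carry the same members as the corresponding Vk* C structs.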
21298#ifndef VULKAN_HPP_NO_SMART_HANDLE
21299 class DeviceDeleter
21300 {
21301 public:
21302 DeviceDeleter( Optional<const AllocationCallbacks> allocator = nullptr )
21303 : m_allocator( allocator )
21304 {}
21305
21306 void operator()( Device device )
21307 {
21308 device.destroy( m_allocator );
21309 }
21310
21311 private:
21312 Optional<const AllocationCallbacks> m_allocator;
21313 };
21314#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
21315
21316 VULKAN_HPP_INLINE void PhysicalDevice::getProperties( PhysicalDeviceProperties* pProperties ) const
21317 {
21318 vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
21319 }
21320#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21321 VULKAN_HPP_INLINE PhysicalDeviceProperties PhysicalDevice::getProperties() const
21322 {
21323 PhysicalDeviceProperties properties;
21324 vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
21325 return properties;
21326 }
21327#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21328
21329 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties* pQueueFamilyProperties ) const
21330 {
21331 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
21332 }
21333#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21334 template <typename Allocator>
21335 VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties() const
21336 {
21337 std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
21338 uint32_t queueFamilyPropertyCount;
21339 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
21340 queueFamilyProperties.resize( queueFamilyPropertyCount );
21341 vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
21342 return queueFamilyProperties;
21343 }
21344#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
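  // [Editorial note] Illustrative sketch: picking a graphics-capable queue family from the vector returned
  // by the enhanced-mode overload above. `physicalDevice` is assumed; queueFlags and queueCount mirror the
  // VkQueueFamilyProperties members.
  //
  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  //   uint32_t graphicsFamily = ~0u;
  //   for ( uint32_t i = 0; i < families.size(); i++ )
  //   {
  //     if ( families[i].queueCount > 0 && ( families[i].queueFlags & vk::QueueFlagBits::eGraphics ) )
  //     {
  //       graphicsFamily = i;
  //       break;
  //     }
  //   }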
21345
21346 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( PhysicalDeviceMemoryProperties* pMemoryProperties ) const
21347 {
21348 vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
21349 }
21350#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21351 VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const
21352 {
21353 PhysicalDeviceMemoryProperties memoryProperties;
21354 vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
21355 return memoryProperties;
21356 }
21357#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21358
21359 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( PhysicalDeviceFeatures* pFeatures ) const
21360 {
21361 vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( pFeatures ) );
21362 }
21363#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21364 VULKAN_HPP_INLINE PhysicalDeviceFeatures PhysicalDevice::getFeatures() const
21365 {
21366 PhysicalDeviceFeatures features;
21367 vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures*>( &features ) );
21368 return features;
21369 }
21370#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21371
21372 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( Format format, FormatProperties* pFormatProperties ) const
21373 {
21374 vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
21375 }
21376#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21377 VULKAN_HPP_INLINE FormatProperties PhysicalDevice::getFormatProperties( Format format ) const
21378 {
21379 FormatProperties formatProperties;
21380 vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
21381 return formatProperties;
21382 }
21383#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21384
21385 VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ImageFormatProperties* pImageFormatProperties ) const
21386 {
21387 return static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
21388 }
21389#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21390 VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags ) const
21391 {
21392 ImageFormatProperties imageFormatProperties;
21393 Result result = static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
21394 return createResultValue( result, imageFormatProperties, "vk::PhysicalDevice::getImageFormatProperties" );
21395 }
21396#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
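  // [Editorial note] Illustrative sketch: asking whether 2D optimal-tiling sampled images are supported
  // for a given format. In exception-enabled builds the enhanced overload above throws on errors such as
  // eErrorFormatNotSupported; the pointer overload shown here returns the vk::Result instead.
  // `physicalDevice` is assumed.
  //
  //   vk::ImageFormatProperties imageProps;
  //   vk::Result supported = physicalDevice.getImageFormatProperties(
  //     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
  //     vk::ImageUsageFlagBits::eSampled, vk::ImageCreateFlags(), &imageProps );
  //   // supported == vk::Result::eSuccess => imageProps.maxExtent / maxMipLevels / sampleCounts are valid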
21397
21398 VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const DeviceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Device* pDevice ) const
21399 {
21400 return static_cast<Result>( vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
21401 }
21402#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21403 VULKAN_HPP_INLINE ResultValueType<Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
21404 {
21405 Device device;
21406 Result result = static_cast<Result>( vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDevice*>( &device ) ) );
21407 return createResultValue( result, device, "vk::PhysicalDevice::createDevice" );
21408 }
21409#ifndef VULKAN_HPP_NO_SMART_HANDLE
21410 VULKAN_HPP_INLINE UniqueDevice PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator ) const
21411 {
21412 DeviceDeleter deleter( allocator );
21413 return UniqueDevice( createDevice( createInfo, allocator ), deleter );
21414 }
21415#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
21416#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
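  // [Editorial note] Illustrative end-to-end sketch of createDevice / createDeviceUnique, not generated
  // code (exception-enabled build assumed). `physicalDevice` and `graphicsFamily` (a queue family index
  // chosen as in the sketch further above) are assumed; the struct members mirror VkDeviceQueueCreateInfo
  // and VkDeviceCreateInfo.
  //
  //   float priority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueInfo;
  //   queueInfo.queueFamilyIndex = graphicsFamily;
  //   queueInfo.queueCount       = 1;
  //   queueInfo.pQueuePriorities = &priority;
  //
  //   vk::DeviceCreateInfo deviceInfo;
  //   deviceInfo.queueCreateInfoCount = 1;
  //   deviceInfo.pQueueCreateInfos    = &queueInfo;
  //
  //   #ifndef VULKAN_HPP_NO_SMART_HANDLE
  //     vk::UniqueDevice device = physicalDevice.createDeviceUnique( deviceInfo );  // DeviceDeleter calls destroy()
  //   #else
  //     vk::Device device = physicalDevice.createDevice( deviceInfo );
  //   #endif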
21417
21418 VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, LayerProperties* pProperties ) const
21419 {
21420 return static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
21421 }
21422#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21423 template <typename Allocator>
21424 VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type PhysicalDevice::enumerateDeviceLayerProperties() const
21425 {
21426 std::vector<LayerProperties,Allocator> properties;
21427 uint32_t propertyCount;
21428 Result result;
21429 do
21430 {
21431 result = static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
21432 if ( ( result == Result::eSuccess ) && propertyCount )
21433 {
21434 properties.resize( propertyCount );
21435 result = static_cast<Result>( vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
21436 }
21437 } while ( result == Result::eIncomplete );
21438 assert( propertyCount <= properties.size() );
21439 properties.resize( propertyCount );
21440 return createResultValue( result, properties, "vk::PhysicalDevice::enumerateDeviceLayerProperties" );
21441 }
21442#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
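  // [Editorial note] The loop above is the usual Vulkan two-call enumeration pattern: query the count,
  // size the vector, query again, and retry while eIncomplete is returned because the count changed in
  // between. Illustrative sketch of the enhanced overload (`physicalDevice` assumed, exceptions enabled;
  // layerName is the char-array member mirrored from VkLayerProperties):
  //
  //   for ( vk::LayerProperties const& layer : physicalDevice.enumerateDeviceLayerProperties() )
  //   {
  //     // layer.layerName, layer.specVersion, layer.implementationVersion, layer.description
  //   }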
21443
21444 VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, ExtensionProperties* pProperties ) const
21445 {
21446 return static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
21447 }
21448#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21449 template <typename Allocator>
21450 VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName ) const
21451 {
21452 std::vector<ExtensionProperties,Allocator> properties;
21453 uint32_t propertyCount;
21454 Result result;
21455 do
21456 {
21457 result = static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
21458 if ( ( result == Result::eSuccess ) && propertyCount )
21459 {
21460 properties.resize( propertyCount );
21461 result = static_cast<Result>( vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
21462 }
21463 } while ( result == Result::eIncomplete );
21464 assert( propertyCount <= properties.size() );
21465 properties.resize( propertyCount );
21466 return createResultValue( result, properties, "vk::PhysicalDevice::enumerateDeviceExtensionProperties" );
21467 }
21468#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
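  // [Editorial note] Illustrative sketch: checking for a specific device extension. strcmp is available
  // because this header already includes <cstring>; VK_KHR_SWAPCHAIN_EXTENSION_NAME comes from vulkan.h.
  // `physicalDevice` is assumed, exceptions enabled.
  //
  //   bool hasSwapchain = false;
  //   for ( vk::ExtensionProperties const& ext : physicalDevice.enumerateDeviceExtensionProperties() )
  //   {
  //     if ( strcmp( ext.extensionName, VK_KHR_SWAPCHAIN_EXTENSION_NAME ) == 0 )
  //     {
  //       hasSwapchain = true;
  //     }
  //   }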
21469
21470 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling, uint32_t* pPropertyCount, SparseImageFormatProperties* pProperties ) const
21471 {
21472 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
21473 }
21474#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21475 template <typename Allocator>
21476 VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( Format format, ImageType type, SampleCountFlagBits samples, ImageUsageFlags usage, ImageTiling tiling ) const
21477 {
21478 std::vector<SparseImageFormatProperties,Allocator> properties;
21479 uint32_t propertyCount;
21480 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
21481 properties.resize( propertyCount );
21482 vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
21483 return properties;
21484 }
21485#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21486
21487 VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, DisplayPropertiesKHR* pProperties ) const
21488 {
21489 return static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( pProperties ) ) );
21490 }
21491#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21492 template <typename Allocator>
21493 VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPropertiesKHR() const
21494 {
21495 std::vector<DisplayPropertiesKHR,Allocator> properties;
21496 uint32_t propertyCount;
21497 Result result;
21498 do
21499 {
21500 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
21501 if ( ( result == Result::eSuccess ) && propertyCount )
21502 {
21503 properties.resize( propertyCount );
21504 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR*>( properties.data() ) ) );
21505 }
21506 } while ( result == Result::eIncomplete );
21507 assert( propertyCount <= properties.size() );
21508 properties.resize( propertyCount );
21509 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayPropertiesKHR" );
21510 }
21511#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21512
21513 VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, DisplayPlanePropertiesKHR* pProperties ) const
21514 {
21515 return static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( pProperties ) ) );
21516 }
21517#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21518 template <typename Allocator>
21519 VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR() const
21520 {
21521 std::vector<DisplayPlanePropertiesKHR,Allocator> properties;
21522 uint32_t propertyCount;
21523 Result result;
21524 do
21525 {
21526 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
21527 if ( ( result == Result::eSuccess ) && propertyCount )
21528 {
21529 properties.resize( propertyCount );
21530 result = static_cast<Result>( vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR*>( properties.data() ) ) );
21531 }
21532 } while ( result == Result::eIncomplete );
21533 assert( propertyCount <= properties.size() );
21534 properties.resize( propertyCount );
21535 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayPlanePropertiesKHR" );
21536 }
21537#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21538
21539 VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, DisplayKHR* pDisplays ) const
21540 {
21541 return static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR*>( pDisplays ) ) );
21542 }
21543#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21544 template <typename Allocator>
21545 VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
21546 {
21547 std::vector<DisplayKHR,Allocator> displays;
21548 uint32_t displayCount;
21549 Result result;
21550 do
21551 {
21552 result = static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
21553 if ( ( result == Result::eSuccess ) && displayCount )
21554 {
21555 displays.resize( displayCount );
21556 result = static_cast<Result>( vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR*>( displays.data() ) ) );
21557 }
21558 } while ( result == Result::eIncomplete );
21559 assert( displayCount <= displays.size() );
21560 displays.resize( displayCount );
21561 return createResultValue( result, displays, "vk::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
21562 }
21563#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21564
21565 VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display, uint32_t* pPropertyCount, DisplayModePropertiesKHR* pProperties ) const
21566 {
21567 return static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( pProperties ) ) );
21568 }
21569#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21570 template <typename Allocator>
21571 VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( DisplayKHR display ) const
21572 {
21573 std::vector<DisplayModePropertiesKHR,Allocator> properties;
21574 uint32_t propertyCount;
21575 Result result;
21576 do
21577 {
21578 result = static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
21579 if ( ( result == Result::eSuccess ) && propertyCount )
21580 {
21581 properties.resize( propertyCount );
21582 result = static_cast<Result>( vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR*>( properties.data() ) ) );
21583 }
21584 } while ( result == Result::eIncomplete );
21585 assert( propertyCount <= properties.size() );
21586 properties.resize( propertyCount );
21587 return createResultValue( result, properties, "vk::PhysicalDevice::getDisplayModePropertiesKHR" );
21588 }
21589#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21590
21591 VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, DisplayModeKHR* pMode ) const
21592 {
21593 return static_cast<Result>( vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
21594 }
21595#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21596 VULKAN_HPP_INLINE ResultValueType<DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
21597 {
21598 DisplayModeKHR mode;
21599 Result result = static_cast<Result>( vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDisplayModeKHR*>( &mode ) ) );
21600 return createResultValue( result, mode, "vk::PhysicalDevice::createDisplayModeKHR" );
21601 }
21602#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21603
21604 VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex, DisplayPlaneCapabilitiesKHR* pCapabilities ) const
21605 {
21606 return static_cast<Result>( vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( pCapabilities ) ) );
21607 }
21608#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21609 VULKAN_HPP_INLINE ResultValueType<DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( DisplayModeKHR mode, uint32_t planeIndex ) const
21610 {
21611 DisplayPlaneCapabilitiesKHR capabilities;
21612 Result result = static_cast<Result>( vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( &capabilities ) ) );
21613 return createResultValue( result, capabilities, "vk::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
21614 }
21615#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21616
21617#ifdef VK_USE_PLATFORM_MIR_KHR
21618 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection* connection ) const
21619 {
21620 return vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection );
21621 }
21622#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21623 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getMirPresentationSupportKHR( uint32_t queueFamilyIndex, MirConnection & connection ) const
21624 {
21625 return vkGetPhysicalDeviceMirPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection );
21626 }
21627#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21628#endif /*VK_USE_PLATFORM_MIR_KHR*/
21629
21630 VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface, Bool32* pSupported ) const
21631 {
21632 return static_cast<Result>( vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), pSupported ) );
21633 }
21634#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21635 VULKAN_HPP_INLINE ResultValueType<Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, SurfaceKHR surface ) const
21636 {
21637 Bool32 supported;
21638 Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), &supported ) );
21639 return createResultValue( result, supported, "vk::PhysicalDevice::getSurfaceSupportKHR" );
21640 }
21641#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
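  // [Editorial note] Illustrative sketch: finding a queue family that can present to a given surface.
  // With exceptions enabled the enhanced overload above throws on error and yields the Bool32 directly.
  // `physicalDevice`, `surface`, and `queueFamilyCount` (e.g. from getQueueFamilyProperties().size())
  // are assumed.
  //
  //   uint32_t presentFamily = ~0u;
  //   for ( uint32_t i = 0; i < queueFamilyCount; i++ )
  //   {
  //     if ( physicalDevice.getSurfaceSupportKHR( i, surface ) )
  //     {
  //       presentFamily = i;
  //       break;
  //     }
  //   }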
21642
21643 VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface, SurfaceCapabilitiesKHR* pSurfaceCapabilities ) const
21644 {
21645 return static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
21646 }
21647#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21648 VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( SurfaceKHR surface ) const
21649 {
21650 SurfaceCapabilitiesKHR surfaceCapabilities;
21651 Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
21652 return createResultValue( result, surfaceCapabilities, "vk::PhysicalDevice::getSurfaceCapabilitiesKHR" );
21653 }
21654#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21655
21656 VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface, uint32_t* pSurfaceFormatCount, SurfaceFormatKHR* pSurfaceFormats ) const
21657 {
21658 return static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
21659 }
21660#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21661 template <typename Allocator>
21662 VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( SurfaceKHR surface ) const
21663 {
21664 std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
21665 uint32_t surfaceFormatCount;
21666 Result result;
21667 do
21668 {
21669 result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
21670 if ( ( result == Result::eSuccess ) && surfaceFormatCount )
21671 {
21672 surfaceFormats.resize( surfaceFormatCount );
21673 result = static_cast<Result>( vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
21674 }
21675 } while ( result == Result::eIncomplete );
21676 assert( surfaceFormatCount <= surfaceFormats.size() );
21677 surfaceFormats.resize( surfaceFormatCount );
21678 return createResultValue( result, surfaceFormats, "vk::PhysicalDevice::getSurfaceFormatsKHR" );
21679 }
21680#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21681
21682 VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface, uint32_t* pPresentModeCount, PresentModeKHR* pPresentModes ) const
21683 {
21684 return static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
21685 }
21686#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21687 template <typename Allocator>
21688 VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( SurfaceKHR surface ) const
21689 {
21690 std::vector<PresentModeKHR,Allocator> presentModes;
21691 uint32_t presentModeCount;
21692 Result result;
21693 do
21694 {
21695 result = static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
21696 if ( ( result == Result::eSuccess ) && presentModeCount )
21697 {
21698 presentModes.resize( presentModeCount );
21699 result = static_cast<Result>( vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
21700 }
21701 } while ( result == Result::eIncomplete );
21702 assert( presentModeCount <= presentModes.size() );
21703 presentModes.resize( presentModeCount );
21704 return createResultValue( result, presentModes, "vk::PhysicalDevice::getSurfacePresentModesKHR" );
21705 }
21706#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21707
21708#ifdef VK_USE_PLATFORM_WAYLAND_KHR
21709 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display ) const
21710 {
21711 return vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display );
21712 }
21713#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21714 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const
21715 {
21716 return vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
21717 }
21718#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21719#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
21720
21721#ifdef VK_USE_PLATFORM_WIN32_KHR
21722 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const
21723 {
21724 return vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
21725 }
21726#endif /*VK_USE_PLATFORM_WIN32_KHR*/
21727
21728#ifdef VK_USE_PLATFORM_XLIB_KHR
21729 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const
21730 {
21731 return vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID );
21732 }
21733#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21734 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const
21735 {
21736 return vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
21737 }
21738#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21739#endif /*VK_USE_PLATFORM_XLIB_KHR*/
21740
21741#ifdef VK_USE_PLATFORM_XCB_KHR
21742 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const
21743 {
21744 return vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id );
21745 }
21746#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21747 VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const
21748 {
21749 return vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
21750 }
21751#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21752#endif /*VK_USE_PLATFORM_XCB_KHR*/
21753
21754 VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType, ExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const
21755 {
21756 return static_cast<Result>( vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( pExternalImageFormatProperties ) ) );
21757 }
21758#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21759 VULKAN_HPP_INLINE ResultValueType<ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( Format format, ImageType type, ImageTiling tiling, ImageUsageFlags usage, ImageCreateFlags flags, ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
21760 {
21761 ExternalImageFormatPropertiesNV externalImageFormatProperties;
21762 Result result = static_cast<Result>( vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV*>( &externalImageFormatProperties ) ) );
21763 return createResultValue( result, externalImageFormatProperties, "vk::PhysicalDevice::getExternalImageFormatPropertiesNV" );
21764 }
21765#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21766
21767 VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX* pFeatures, DeviceGeneratedCommandsLimitsNVX* pLimits ) const
21768 {
21769 vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
21770 }
21771#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21772 VULKAN_HPP_INLINE DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features ) const
21773 {
21774 DeviceGeneratedCommandsLimitsNVX limits;
21775 vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
21776 return limits;
21777 }
21778#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21779
21780 VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( PhysicalDeviceFeatures2KHR* pFeatures ) const
21781 {
21782 vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2KHR*>( pFeatures ) );
21783 }
21784#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21785 VULKAN_HPP_INLINE PhysicalDeviceFeatures2KHR PhysicalDevice::getFeatures2KHR() const
21786 {
21787 PhysicalDeviceFeatures2KHR features;
21788 vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2KHR*>( &features ) );
21789 return features;
21790 }
21791#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21792
21793 VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( PhysicalDeviceProperties2KHR* pProperties ) const
21794 {
21795 vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2KHR*>( pProperties ) );
21796 }
21797#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21798 VULKAN_HPP_INLINE PhysicalDeviceProperties2KHR PhysicalDevice::getProperties2KHR() const
21799 {
21800 PhysicalDeviceProperties2KHR properties;
21801 vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2KHR*>( &properties ) );
21802 return properties;
21803 }
21804#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21805
21806 VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( Format format, FormatProperties2KHR* pFormatProperties ) const
21807 {
21808 vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2KHR*>( pFormatProperties ) );
21809 }
21810#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21811 VULKAN_HPP_INLINE FormatProperties2KHR PhysicalDevice::getFormatProperties2KHR( Format format ) const
21812 {
21813 FormatProperties2KHR formatProperties;
21814 vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2KHR*>( &formatProperties ) );
21815 return formatProperties;
21816 }
21817#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21818
21819 VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo, ImageFormatProperties2KHR* pImageFormatProperties ) const
21820 {
21821 return static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2KHR*>( pImageFormatProperties ) ) );
21822 }
21823#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21824 VULKAN_HPP_INLINE ResultValueType<ImageFormatProperties2KHR>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2KHR & imageFormatInfo ) const
21825 {
21826 ImageFormatProperties2KHR imageFormatProperties;
21827 Result result = static_cast<Result>( vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2KHR*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2KHR*>( &imageFormatProperties ) ) );
21828 return createResultValue( result, imageFormatProperties, "vk::PhysicalDevice::getImageFormatProperties2KHR" );
21829 }
21830#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21831
21832 VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, QueueFamilyProperties2KHR* pQueueFamilyProperties ) const
21833 {
21834 vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2KHR*>( pQueueFamilyProperties ) );
21835 }
21836#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21837 template <typename Allocator>
21838 VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2KHR,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR() const
21839 {
21840 std::vector<QueueFamilyProperties2KHR,Allocator> queueFamilyProperties;
21841 uint32_t queueFamilyPropertyCount;
21842 vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
21843 queueFamilyProperties.resize( queueFamilyPropertyCount );
21844 vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2KHR*>( queueFamilyProperties.data() ) );
21845 return queueFamilyProperties;
21846 }
21847#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21848
21849 VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( PhysicalDeviceMemoryProperties2KHR* pMemoryProperties ) const
21850 {
21851 vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2KHR*>( pMemoryProperties ) );
21852 }
21853#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21854 VULKAN_HPP_INLINE PhysicalDeviceMemoryProperties2KHR PhysicalDevice::getMemoryProperties2KHR() const
21855 {
21856 PhysicalDeviceMemoryProperties2KHR memoryProperties;
21857 vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2KHR*>( &memoryProperties ) );
21858 return memoryProperties;
21859 }
21860#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21861
21862 VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR* pFormatInfo, uint32_t* pPropertyCount, SparseImageFormatProperties2KHR* pProperties ) const
21863 {
21864 vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2KHR*>( pProperties ) );
21865 }
21866#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21867 template <typename Allocator>
21868 VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2KHR,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2KHR & formatInfo ) const
21869 {
21870 std::vector<SparseImageFormatProperties2KHR,Allocator> properties;
21871 uint32_t propertyCount;
21872 vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( &formatInfo ), &propertyCount, nullptr );
21873 properties.resize( propertyCount );
21874 vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2KHR*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2KHR*>( properties.data() ) );
21875 return properties;
21876 }
21877#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21878
21879#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
21880 VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( DisplayKHR display ) const
21881 {
21882 return static_cast<Result>( vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
21883 }
21884#else
21885 VULKAN_HPP_INLINE ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( DisplayKHR display ) const
21886 {
21887 Result result = static_cast<Result>( vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
21888 return createResultValue( result, "vk::PhysicalDevice::releaseDisplayEXT" );
21889 }
21890#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21891
21892#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
21893 VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, DisplayKHR display ) const
21894 {
21895 return static_cast<Result>( vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
21896 }
21897#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21898 VULKAN_HPP_INLINE ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( DisplayKHR display ) const
21899 {
21900 Display dpy;
21901 Result result = static_cast<Result>( vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
21902 return createResultValue( result, dpy, "vk::PhysicalDevice::acquireXlibDisplayEXT" );
21903 }
21904#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21905#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
21906
21907#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
21908 VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, DisplayKHR* pDisplay ) const
21909 {
21910 return static_cast<Result>( vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
21911 }
21912#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21913 VULKAN_HPP_INLINE ResultValueType<DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const
21914 {
21915 DisplayKHR display;
21916 Result result = static_cast<Result>( vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
21917 return createResultValue( result, display, "vk::PhysicalDevice::getRandROutputDisplayEXT" );
21918 }
21919#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21920#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
21921
21922 VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface, SurfaceCapabilities2EXT* pSurfaceCapabilities ) const
21923 {
21924 return static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
21925 }
21926#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
21927 VULKAN_HPP_INLINE ResultValueType<SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( SurfaceKHR surface ) const
21928 {
21929 SurfaceCapabilities2EXT surfaceCapabilities;
21930 Result result = static_cast<Result>( vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
21931 return createResultValue( result, surfaceCapabilities, "vk::PhysicalDevice::getSurfaceCapabilities2EXT" );
21932 }
21933#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21934#ifndef VULKAN_HPP_NO_SMART_HANDLE
21935 class DebugReportCallbackEXTDeleter;
21936 using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, DebugReportCallbackEXTDeleter>;
21937 class SurfaceKHRDeleter;
21938 using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, SurfaceKHRDeleter>;
21939#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
21940
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021941 class Instance
21942 {
21943 public:
21944 Instance()
21945 : m_instance(VK_NULL_HANDLE)
21946 {}
21947
Mark Lobodzinskicd306ab2017-02-14 16:09:03 -070021948 Instance( std::nullptr_t )
Mark Lobodzinski36c33862017-02-13 10:15:53 -070021949 : m_instance(VK_NULL_HANDLE)
21950 {}
21951
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021952#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
21953 Instance(VkInstance instance)
21954 : m_instance(instance)
21955 {}
21956
21957 Instance& operator=(VkInstance instance)
21958 {
21959 m_instance = instance;
21960 return *this;
21961 }
21962#endif
21963
Mark Lobodzinskicd306ab2017-02-14 16:09:03 -070021964 Instance& operator=( std::nullptr_t )
Mark Lobodzinski36c33862017-02-13 10:15:53 -070021965 {
21966 m_instance = VK_NULL_HANDLE;
21967 return *this;
21968 }
21969
Lenny Komowebf33162016-08-26 14:10:08 -060021970 bool operator==(Instance const &rhs) const
21971 {
21972 return m_instance == rhs.m_instance;
21973 }
21974
21975 bool operator!=(Instance const &rhs) const
21976 {
21977 return m_instance != rhs.m_instance;
21978 }
21979
21980 bool operator<(Instance const &rhs) const
21981 {
21982 return m_instance < rhs.m_instance;
21983 }
21984
Mark Lobodzinski36c33862017-02-13 10:15:53 -070021985 void destroy( const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021986#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070021987 void destroy( Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021988#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21989
Mark Lobodzinski36c33862017-02-13 10:15:53 -070021990 Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021991#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070021992 template <typename Allocator = std::allocator<PhysicalDevice>>
21993 typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices() const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021994#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
21995
Mark Lobodzinski36c33862017-02-13 10:15:53 -070021996 PFN_vkVoidFunction getProcAddr( const char* pName ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021997#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070021998 PFN_vkVoidFunction getProcAddr( const std::string & name ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060021999#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22000
22001#ifdef VK_USE_PLATFORM_ANDROID_KHR
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022002 Result createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022003#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022004 ResultValueType<SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22005#ifndef VULKAN_HPP_NO_SMART_HANDLE
22006 UniqueSurfaceKHR createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22007#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022008#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022009#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022010
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022011 Result createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022012#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022013 ResultValueType<SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22014#ifndef VULKAN_HPP_NO_SMART_HANDLE
22015 UniqueSurfaceKHR createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22016#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022017#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22018
22019#ifdef VK_USE_PLATFORM_MIR_KHR
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022020 Result createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022021#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022022 ResultValueType<SurfaceKHR>::type createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22023#ifndef VULKAN_HPP_NO_SMART_HANDLE
22024 UniqueSurfaceKHR createMirSurfaceKHRUnique( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22025#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022026#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022027#endif /*VK_USE_PLATFORM_MIR_KHR*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022028
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022029 void destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022030#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022031 void destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022032#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22033
Mark Young39389872017-01-19 21:10:49 -070022034#ifdef VK_USE_PLATFORM_VI_NN
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022035 Result createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
22036#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22037 ResultValueType<SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22038#ifndef VULKAN_HPP_NO_SMART_HANDLE
22039 UniqueSurfaceKHR createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22040#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22041#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Mark Young39389872017-01-19 21:10:49 -070022042#endif /*VK_USE_PLATFORM_VI_NN*/
22043
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022044#ifdef VK_USE_PLATFORM_WAYLAND_KHR
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022045 Result createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
22046#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22047 ResultValueType<SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22048#ifndef VULKAN_HPP_NO_SMART_HANDLE
22049 UniqueSurfaceKHR createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22050#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22051#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022052#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
22053
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022054#ifdef VK_USE_PLATFORM_WIN32_KHR
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022055 Result createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
22056#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22057 ResultValueType<SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22058#ifndef VULKAN_HPP_NO_SMART_HANDLE
22059 UniqueSurfaceKHR createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22060#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22061#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022062#endif /*VK_USE_PLATFORM_WIN32_KHR*/
22063
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022064#ifdef VK_USE_PLATFORM_XLIB_KHR
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022065 Result createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
22066#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22067 ResultValueType<SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22068#ifndef VULKAN_HPP_NO_SMART_HANDLE
22069 UniqueSurfaceKHR createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22070#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22071#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022072#endif /*VK_USE_PLATFORM_XLIB_KHR*/
22073
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022074#ifdef VK_USE_PLATFORM_XCB_KHR
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022075 Result createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const;
22076#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22077 ResultValueType<SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22078#ifndef VULKAN_HPP_NO_SMART_HANDLE
22079 UniqueSurfaceKHR createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22080#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22081#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022082#endif /*VK_USE_PLATFORM_XCB_KHR*/
22083
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022084 Result createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022085#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022086 ResultValueType<DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22087#ifndef VULKAN_HPP_NO_SMART_HANDLE
22088 UniqueDebugReportCallbackEXT createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr ) const;
22089#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022090#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22091
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022092 void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022093#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022094 void destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022095#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22096
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022097 void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022098#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022099 void debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const;
Lenny Komowbed9b5c2016-08-11 11:23:15 -060022100#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22101
22102#if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
22103 explicit
22104#endif
22105 operator VkInstance() const
22106 {
22107 return m_instance;
22108 }
22109
22110 explicit operator bool() const
22111 {
22112 return m_instance != VK_NULL_HANDLE;
22113 }
22114
22115 bool operator!() const
22116 {
22117 return m_instance == VK_NULL_HANDLE;
22118 }
22119
22120 private:
22121 VkInstance m_instance;
22122 };
22123 static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
22124
Mark Lobodzinski36c33862017-02-13 10:15:53 -070022125#ifndef VULKAN_HPP_NO_SMART_HANDLE
22126 class DebugReportCallbackEXTDeleter
22127 {
22128 public:
22129 DebugReportCallbackEXTDeleter( Instance instance = Instance(), Optional<const AllocationCallbacks> allocator = nullptr )
22130 : m_instance( instance )
22131 , m_allocator( allocator )
22132 {}
22133
22134 void operator()( DebugReportCallbackEXT debugReportCallbackEXT )
22135 {
22136 m_instance.destroyDebugReportCallbackEXT( debugReportCallbackEXT, m_allocator );
22137 }
22138
22139 private:
22140 Instance m_instance;
22141 Optional<const AllocationCallbacks> m_allocator;
22142 };
22143
22144 class SurfaceKHRDeleter
22145 {
22146 public:
22147 SurfaceKHRDeleter( Instance instance = Instance(), Optional<const AllocationCallbacks> allocator = nullptr )
22148 : m_instance( instance )
22149 , m_allocator( allocator )
22150 {}
22151
22152 void operator()( SurfaceKHR surfaceKHR )
22153 {
22154 m_instance.destroySurfaceKHR( surfaceKHR, m_allocator );
22155 }
22156
22157 private:
22158 Instance m_instance;
22159 Optional<const AllocationCallbacks> m_allocator;
22160 };
22161#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22162
22163 VULKAN_HPP_INLINE void Instance::destroy( const AllocationCallbacks* pAllocator ) const
22164 {
22165 vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
22166 }
22167#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22168 VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator ) const
22169 {
22170 vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
22171 }
22172#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22173
22174 VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, PhysicalDevice* pPhysicalDevices ) const
22175 {
22176 return static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
22177 }
22178#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22179 template <typename Allocator>
22180 VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices() const
22181 {
22182 std::vector<PhysicalDevice,Allocator> physicalDevices;
22183 uint32_t physicalDeviceCount;
22184 Result result;
22185 do
22186 {
22187 result = static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
22188 if ( ( result == Result::eSuccess ) && physicalDeviceCount )
22189 {
22190 physicalDevices.resize( physicalDeviceCount );
22191 result = static_cast<Result>( vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
22192 }
22193 } while ( result == Result::eIncomplete );
22194 assert( physicalDeviceCount <= physicalDevices.size() );
22195 physicalDevices.resize( physicalDeviceCount );
22196 return createResultValue( result, physicalDevices, "vk::Instance::enumeratePhysicalDevices" );
22197 }
22198#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22199
22200 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName ) const
22201 {
22202 return vkGetInstanceProcAddr( m_instance, pName );
22203 }
22204#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22205 VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name ) const
22206 {
22207 return vkGetInstanceProcAddr( m_instance, name.c_str() );
22208 }
22209#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22210
22211#ifdef VK_USE_PLATFORM_ANDROID_KHR
22212 VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
22213 {
22214 return static_cast<Result>( vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
22215 }
22216#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22217 VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22218 {
22219 SurfaceKHR surface;
22220 Result result = static_cast<Result>( vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
22221 return createResultValue( result, surface, "vk::Instance::createAndroidSurfaceKHR" );
22222 }
22223#ifndef VULKAN_HPP_NO_SMART_HANDLE
22224 VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22225 {
22226 SurfaceKHRDeleter deleter( *this, allocator );
22227 return UniqueSurfaceKHR( createAndroidSurfaceKHR( createInfo, allocator ), deleter );
22228 }
22229#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22230#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22231#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
22232
22233 VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
22234 {
22235 return static_cast<Result>( vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
22236 }
22237#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22238 VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22239 {
22240 SurfaceKHR surface;
22241 Result result = static_cast<Result>( vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
22242 return createResultValue( result, surface, "vk::Instance::createDisplayPlaneSurfaceKHR" );
22243 }
22244#ifndef VULKAN_HPP_NO_SMART_HANDLE
22245 VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22246 {
22247 SurfaceKHRDeleter deleter( *this, allocator );
22248 return UniqueSurfaceKHR( createDisplayPlaneSurfaceKHR( createInfo, allocator ), deleter );
22249 }
22250#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22251#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22252
22253#ifdef VK_USE_PLATFORM_MIR_KHR
22254 VULKAN_HPP_INLINE Result Instance::createMirSurfaceKHR( const MirSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
22255 {
22256 return static_cast<Result>( vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
22257 }
22258#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22259 VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createMirSurfaceKHR( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22260 {
22261 SurfaceKHR surface;
22262 Result result = static_cast<Result>( vkCreateMirSurfaceKHR( m_instance, reinterpret_cast<const VkMirSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
22263 return createResultValue( result, surface, "vk::Instance::createMirSurfaceKHR" );
22264 }
22265#ifndef VULKAN_HPP_NO_SMART_HANDLE
22266 VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createMirSurfaceKHRUnique( const MirSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22267 {
22268 SurfaceKHRDeleter deleter( *this, allocator );
22269 return UniqueSurfaceKHR( createMirSurfaceKHR( createInfo, allocator ), deleter );
22270 }
22271#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22272#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22273#endif /*VK_USE_PLATFORM_MIR_KHR*/
22274
22275 VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, const AllocationCallbacks* pAllocator ) const
22276 {
22277 vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
22278 }
22279#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22280 VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( SurfaceKHR surface, Optional<const AllocationCallbacks> allocator ) const
22281 {
22282 vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
22283 }
22284#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22285
22286#ifdef VK_USE_PLATFORM_VI_NN
22287 VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
22288 {
22289 return static_cast<Result>( vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
22290 }
22291#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22292 VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator ) const
22293 {
22294 SurfaceKHR surface;
22295 Result result = static_cast<Result>( vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
22296 return createResultValue( result, surface, "vk::Instance::createViSurfaceNN" );
22297 }
22298#ifndef VULKAN_HPP_NO_SMART_HANDLE
22299 VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator ) const
22300 {
22301 SurfaceKHRDeleter deleter( *this, allocator );
22302 return UniqueSurfaceKHR( createViSurfaceNN( createInfo, allocator ), deleter );
22303 }
22304#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22305#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22306#endif /*VK_USE_PLATFORM_VI_NN*/
22307
22308#ifdef VK_USE_PLATFORM_WAYLAND_KHR
22309 VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
22310 {
22311 return static_cast<Result>( vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
22312 }
22313#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22314 VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22315 {
22316 SurfaceKHR surface;
22317 Result result = static_cast<Result>( vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
22318 return createResultValue( result, surface, "vk::Instance::createWaylandSurfaceKHR" );
22319 }
22320#ifndef VULKAN_HPP_NO_SMART_HANDLE
22321 VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22322 {
22323 SurfaceKHRDeleter deleter( *this, allocator );
22324 return UniqueSurfaceKHR( createWaylandSurfaceKHR( createInfo, allocator ), deleter );
22325 }
22326#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22327#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22328#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
22329
22330#ifdef VK_USE_PLATFORM_WIN32_KHR
22331 VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
22332 {
22333 return static_cast<Result>( vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
22334 }
22335#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22336 VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22337 {
22338 SurfaceKHR surface;
22339 Result result = static_cast<Result>( vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
22340 return createResultValue( result, surface, "vk::Instance::createWin32SurfaceKHR" );
22341 }
22342#ifndef VULKAN_HPP_NO_SMART_HANDLE
22343 VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22344 {
22345 SurfaceKHRDeleter deleter( *this, allocator );
22346 return UniqueSurfaceKHR( createWin32SurfaceKHR( createInfo, allocator ), deleter );
22347 }
22348#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22349#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22350#endif /*VK_USE_PLATFORM_WIN32_KHR*/
22351
22352#ifdef VK_USE_PLATFORM_XLIB_KHR
22353 VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
22354 {
22355 return static_cast<Result>( vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
22356 }
22357#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22358 VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22359 {
22360 SurfaceKHR surface;
22361 Result result = static_cast<Result>( vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
22362 return createResultValue( result, surface, "vk::Instance::createXlibSurfaceKHR" );
22363 }
22364#ifndef VULKAN_HPP_NO_SMART_HANDLE
22365 VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22366 {
22367 SurfaceKHRDeleter deleter( *this, allocator );
22368 return UniqueSurfaceKHR( createXlibSurfaceKHR( createInfo, allocator ), deleter );
22369 }
22370#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22371#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22372#endif /*VK_USE_PLATFORM_XLIB_KHR*/
22373
22374#ifdef VK_USE_PLATFORM_XCB_KHR
22375 VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR* pCreateInfo, const AllocationCallbacks* pAllocator, SurfaceKHR* pSurface ) const
22376 {
22377 return static_cast<Result>( vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
22378 }
22379#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22380 VULKAN_HPP_INLINE ResultValueType<SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22381 {
22382 SurfaceKHR surface;
22383 Result result = static_cast<Result>( vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
22384 return createResultValue( result, surface, "vk::Instance::createXcbSurfaceKHR" );
22385 }
22386#ifndef VULKAN_HPP_NO_SMART_HANDLE
22387 VULKAN_HPP_INLINE UniqueSurfaceKHR Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator ) const
22388 {
22389 SurfaceKHRDeleter deleter( *this, allocator );
22390 return UniqueSurfaceKHR( createXcbSurfaceKHR( createInfo, allocator ), deleter );
22391 }
22392#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22393#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22394#endif /*VK_USE_PLATFORM_XCB_KHR*/
22395
22396 VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT* pCreateInfo, const AllocationCallbacks* pAllocator, DebugReportCallbackEXT* pCallback ) const
22397 {
22398 return static_cast<Result>( vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
22399 }
22400#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22401 VULKAN_HPP_INLINE ResultValueType<DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator ) const
22402 {
22403 DebugReportCallbackEXT callback;
22404 Result result = static_cast<Result>( vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
22405 return createResultValue( result, callback, "vk::Instance::createDebugReportCallbackEXT" );
22406 }
22407#ifndef VULKAN_HPP_NO_SMART_HANDLE
22408 VULKAN_HPP_INLINE UniqueDebugReportCallbackEXT Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator ) const
22409 {
22410 DebugReportCallbackEXTDeleter deleter( *this, allocator );
22411 return UniqueDebugReportCallbackEXT( createDebugReportCallbackEXT( createInfo, allocator ), deleter );
22412 }
22413#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
22414#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22415
22416 VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, const AllocationCallbacks* pAllocator ) const
22417 {
22418 vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
22419 }
22420#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22421 VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator ) const
22422 {
22423 vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)) );
22424 }
22425#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
22426
22427 VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const
22428 {
22429 vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
22430 }
22431#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
22432 VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message ) const
22433 {
22434#ifdef VULKAN_HPP_NO_EXCEPTIONS
22435 assert( layerPrefix.size() == message.size() );
22436#else
22437 if ( layerPrefix.size() != message.size() )
22438 {
22439 throw std::logic_error( "vk::Instance::debugReportMessageEXT: layerPrefix.size() != message.size()" );
22440 }
22441#endif // VULKAN_HPP_NO_EXCEPTIONS
22442 vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
22443 }
22444#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
Mark Lobodzinski2d589822016-12-12 09:44:34 -070022445 struct CmdProcessCommandsInfoNVX
Lenny Komow68432d72016-09-29 14:16:59 -060022446 {
Mark Lobodzinski2d589822016-12-12 09:44:34 -070022447 CmdProcessCommandsInfoNVX( ObjectTableNVX objectTable_ = ObjectTableNVX(), IndirectCommandsLayoutNVX indirectCommandsLayout_ = IndirectCommandsLayoutNVX(), uint32_t indirectCommandsTokenCount_ = 0, const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = nullptr, uint32_t maxSequencesCount_ = 0, CommandBuffer targetCommandBuffer_ = CommandBuffer(), Buffer sequencesCountBuffer_ = Buffer(), DeviceSize sequencesCountOffset_ = 0, Buffer sequencesIndexBuffer_ = Buffer(), DeviceSize sequencesIndexOffset_ = 0 )
22448 : sType( StructureType::eCmdProcessCommandsInfoNVX )
Lenny Komow68432d72016-09-29 14:16:59 -060022449 , pNext( nullptr )
Mark Lobodzinski2d589822016-12-12 09:44:34 -070022450 , objectTable( objectTable_ )
22451 , indirectCommandsLayout( indirectCommandsLayout_ )
22452 , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
22453 , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
22454 , maxSequencesCount( maxSequencesCount_ )
22455 , targetCommandBuffer( targetCommandBuffer_ )
22456 , sequencesCountBuffer( sequencesCountBuffer_ )
22457 , sequencesCountOffset( sequencesCountOffset_ )
22458 , sequencesIndexBuffer( sequencesIndexBuffer_ )
      , sequencesIndexOffset( sequencesIndexOffset_ )
    {
    }

    CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs )
    {
      memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
    }

    CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs )
    {
      memcpy( this, &rhs, sizeof(CmdProcessCommandsInfoNVX) );
      return *this;
    }

    CmdProcessCommandsInfoNVX& setPNext( const void* pNext_ )
    {
      pNext = pNext_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setObjectTable( ObjectTableNVX objectTable_ )
    {
      objectTable = objectTable_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setIndirectCommandsLayout( IndirectCommandsLayoutNVX indirectCommandsLayout_ )
    {
      indirectCommandsLayout = indirectCommandsLayout_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ )
    {
      indirectCommandsTokenCount = indirectCommandsTokenCount_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setPIndirectCommandsTokens( const IndirectCommandsTokenNVX* pIndirectCommandsTokens_ )
    {
      pIndirectCommandsTokens = pIndirectCommandsTokens_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setMaxSequencesCount( uint32_t maxSequencesCount_ )
    {
      maxSequencesCount = maxSequencesCount_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setTargetCommandBuffer( CommandBuffer targetCommandBuffer_ )
    {
      targetCommandBuffer = targetCommandBuffer_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setSequencesCountBuffer( Buffer sequencesCountBuffer_ )
    {
      sequencesCountBuffer = sequencesCountBuffer_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setSequencesCountOffset( DeviceSize sequencesCountOffset_ )
    {
      sequencesCountOffset = sequencesCountOffset_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ )
    {
      sequencesIndexBuffer = sequencesIndexBuffer_;
      return *this;
    }

    CmdProcessCommandsInfoNVX& setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ )
    {
      sequencesIndexOffset = sequencesIndexOffset_;
      return *this;
    }

    operator const VkCmdProcessCommandsInfoNVX&() const
    {
      return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>(this);
    }

    bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( objectTable == rhs.objectTable )
          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
          && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
          && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
          && ( maxSequencesCount == rhs.maxSequencesCount )
          && ( targetCommandBuffer == rhs.targetCommandBuffer )
          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
          && ( sequencesCountOffset == rhs.sequencesCountOffset )
          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
    }

    bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const
    {
      return !operator==( rhs );
    }

  private:
    StructureType sType;

  public:
    const void* pNext;
    ObjectTableNVX objectTable;
    IndirectCommandsLayoutNVX indirectCommandsLayout;
    uint32_t indirectCommandsTokenCount;
    const IndirectCommandsTokenNVX* pIndirectCommandsTokens;
    uint32_t maxSequencesCount;
    CommandBuffer targetCommandBuffer;
    Buffer sequencesCountBuffer;
    DeviceSize sequencesCountOffset;
    Buffer sequencesIndexBuffer;
    DeviceSize sequencesIndexOffset;
  };
  static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
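
  // Usage sketch (illustrative only, not generated code): the setters above are meant
  // to be chained. The handles objectTable, indirectCommandsLayout, tokens and
  // commandBuffer below are hypothetical objects the application would have created
  // beforehand, and the enhanced-mode wrapper CommandBuffer::processCommandsNVX is
  // assumed to be available elsewhere in this header.
  //
  //   vk::IndirectCommandsTokenNVX tokens[1] = { /* filled in by the application */ };
  //   vk::CmdProcessCommandsInfoNVX info = vk::CmdProcessCommandsInfoNVX()
  //     .setObjectTable( objectTable )
  //     .setIndirectCommandsLayout( indirectCommandsLayout )
  //     .setIndirectCommandsTokenCount( 1 )
  //     .setPIndirectCommandsTokens( tokens )
  //     .setMaxSequencesCount( 1 );
  //   commandBuffer.processCommandsNVX( info );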

#ifndef VULKAN_HPP_NO_SMART_HANDLE
  class InstanceDeleter;
  using UniqueInstance = UniqueHandle<Instance, InstanceDeleter>;
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/

  Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance );
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr );
#ifndef VULKAN_HPP_NO_SMART_HANDLE
  UniqueInstance createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr );
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

#ifndef VULKAN_HPP_NO_SMART_HANDLE
  class InstanceDeleter
  {
  public:
    InstanceDeleter( Optional<const AllocationCallbacks> allocator = nullptr )
      : m_allocator( allocator )
    {}

    void operator()( Instance instance )
    {
      instance.destroy( m_allocator );
    }

  private:
    Optional<const AllocationCallbacks> m_allocator;
  };
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/

  VULKAN_HPP_INLINE Result createInstance( const InstanceCreateInfo* pCreateInfo, const AllocationCallbacks* pAllocator, Instance* pInstance )
  {
    return static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  VULKAN_HPP_INLINE ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator )
  {
    Instance instance;
    Result result = static_cast<Result>( vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator)), reinterpret_cast<VkInstance*>( &instance ) ) );
    return createResultValue( result, instance, "vk::createInstance" );
  }
#ifndef VULKAN_HPP_NO_SMART_HANDLE
  VULKAN_HPP_INLINE UniqueInstance createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator )
  {
    InstanceDeleter deleter( allocator );
    return UniqueInstance( createInstance( createInfo, allocator ), deleter );
  }
#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
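
  // Usage sketch (illustrative only, not generated code): typical use of the
  // createInstance overloads defined above, assuming the default enhanced mode with
  // exceptions enabled. The application and engine names are arbitrary placeholders.
  //
  //   vk::ApplicationInfo appInfo( "ExampleApp", 1, "ExampleEngine", 1, VK_API_VERSION_1_0 );
  //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
  //
  //   // enhanced mode: returns a vk::Instance, throws std::system_error on failure
  //   vk::Instance instance = vk::createInstance( createInfo );
  //   instance.destroy();
  //
  //   // smart-handle mode: the instance is destroyed when 'unique' leaves scope
  //   vk::UniqueInstance unique = vk::createInstanceUnique( createInfo );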

  VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(FramebufferCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(QueryPoolCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(RenderPassCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(SamplerCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineLayoutCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineCacheCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineDepthStencilStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineDynamicStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineColorBlendStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineMultisampleStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineRasterizationStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineViewportStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineTessellationStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineInputAssemblyStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineVertexInputStateCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineShaderStageCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorSetLayoutCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(BufferViewCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(InstanceCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(DeviceCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(DeviceQueueCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(ImageViewCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(SemaphoreCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(ShaderModuleCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(EventCreateFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(EventCreateFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(MemoryMapFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(MemoryMapFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(SubpassDescriptionFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlagBits)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolResetFlags)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagBitsKHR)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(SwapchainCreateFlagsKHR)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagBitsKHR)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(DisplayModeCreateFlagsKHR)
  {
    return "{}";
  }

  VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagBitsKHR)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(DisplaySurfaceCreateFlagsKHR)
  {
    return "{}";
  }

#ifdef VK_USE_PLATFORM_ANDROID_KHR
  VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagBitsKHR)
  {
    return "(void)";
  }
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#ifdef VK_USE_PLATFORM_ANDROID_KHR
  VULKAN_HPP_INLINE std::string to_string(AndroidSurfaceCreateFlagsKHR)
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/

#ifdef VK_USE_PLATFORM_MIR_KHR
  VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagBitsKHR)
  {
    return "(void)";
  }
#endif /*VK_USE_PLATFORM_MIR_KHR*/

#ifdef VK_USE_PLATFORM_MIR_KHR
  VULKAN_HPP_INLINE std::string to_string(MirSurfaceCreateFlagsKHR)
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_MIR_KHR*/

#ifdef VK_USE_PLATFORM_VI_NN
  VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagBitsNN)
  {
    return "(void)";
  }
#endif /*VK_USE_PLATFORM_VI_NN*/

#ifdef VK_USE_PLATFORM_VI_NN
  VULKAN_HPP_INLINE std::string to_string(ViSurfaceCreateFlagsNN)
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_VI_NN*/

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagBitsKHR)
  {
    return "(void)";
  }
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#ifdef VK_USE_PLATFORM_WAYLAND_KHR
  VULKAN_HPP_INLINE std::string to_string(WaylandSurfaceCreateFlagsKHR)
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/

#ifdef VK_USE_PLATFORM_WIN32_KHR
  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagBitsKHR)
  {
    return "(void)";
  }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#ifdef VK_USE_PLATFORM_WIN32_KHR
  VULKAN_HPP_INLINE std::string to_string(Win32SurfaceCreateFlagsKHR)
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_WIN32_KHR*/

#ifdef VK_USE_PLATFORM_XLIB_KHR
  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagBitsKHR)
  {
    return "(void)";
  }
#endif /*VK_USE_PLATFORM_XLIB_KHR*/

#ifdef VK_USE_PLATFORM_XLIB_KHR
  VULKAN_HPP_INLINE std::string to_string(XlibSurfaceCreateFlagsKHR)
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_XLIB_KHR*/

#ifdef VK_USE_PLATFORM_XCB_KHR
  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagBitsKHR)
  {
    return "(void)";
  }
#endif /*VK_USE_PLATFORM_XCB_KHR*/

#ifdef VK_USE_PLATFORM_XCB_KHR
  VULKAN_HPP_INLINE std::string to_string(XcbSurfaceCreateFlagsKHR)
  {
    return "{}";
  }
#endif /*VK_USE_PLATFORM_XCB_KHR*/

  VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagBitsKHR)
  {
    return "(void)";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolTrimFlagsKHR)
  {
    return "{}";
  }

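  // Usage sketch (illustrative only, not generated code): the overloads above handle
  // flag types for which no individual bits are currently defined, so the FlagBits
  // overloads print "(void)" and the Flags overloads print the empty set "{}".
  //
  //   std::string s1 = vk::to_string( vk::ImageViewCreateFlags() );      // "{}"
  //   std::string s2 = vk::to_string( vk::ImageViewCreateFlagBits() );   // "(void)"
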
  VULKAN_HPP_INLINE std::string to_string(ImageLayout value)
  {
    switch (value)
    {
    case ImageLayout::eUndefined: return "Undefined";
    case ImageLayout::eGeneral: return "General";
    case ImageLayout::eColorAttachmentOptimal: return "ColorAttachmentOptimal";
    case ImageLayout::eDepthStencilAttachmentOptimal: return "DepthStencilAttachmentOptimal";
    case ImageLayout::eDepthStencilReadOnlyOptimal: return "DepthStencilReadOnlyOptimal";
    case ImageLayout::eShaderReadOnlyOptimal: return "ShaderReadOnlyOptimal";
    case ImageLayout::eTransferSrcOptimal: return "TransferSrcOptimal";
    case ImageLayout::eTransferDstOptimal: return "TransferDstOptimal";
    case ImageLayout::ePreinitialized: return "Preinitialized";
    case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(AttachmentLoadOp value)
  {
    switch (value)
    {
    case AttachmentLoadOp::eLoad: return "Load";
    case AttachmentLoadOp::eClear: return "Clear";
    case AttachmentLoadOp::eDontCare: return "DontCare";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(AttachmentStoreOp value)
  {
    switch (value)
    {
    case AttachmentStoreOp::eStore: return "Store";
    case AttachmentStoreOp::eDontCare: return "DontCare";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ImageType value)
  {
    switch (value)
    {
    case ImageType::e1D: return "1D";
    case ImageType::e2D: return "2D";
    case ImageType::e3D: return "3D";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ImageTiling value)
  {
    switch (value)
    {
    case ImageTiling::eOptimal: return "Optimal";
    case ImageTiling::eLinear: return "Linear";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ImageViewType value)
  {
    switch (value)
    {
    case ImageViewType::e1D: return "1D";
    case ImageViewType::e2D: return "2D";
    case ImageViewType::e3D: return "3D";
    case ImageViewType::eCube: return "Cube";
    case ImageViewType::e1DArray: return "1DArray";
    case ImageViewType::e2DArray: return "2DArray";
    case ImageViewType::eCubeArray: return "CubeArray";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferLevel value)
  {
    switch (value)
    {
    case CommandBufferLevel::ePrimary: return "Primary";
    case CommandBufferLevel::eSecondary: return "Secondary";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ComponentSwizzle value)
  {
    switch (value)
    {
    case ComponentSwizzle::eIdentity: return "Identity";
    case ComponentSwizzle::eZero: return "Zero";
    case ComponentSwizzle::eOne: return "One";
    case ComponentSwizzle::eR: return "R";
    case ComponentSwizzle::eG: return "G";
    case ComponentSwizzle::eB: return "B";
    case ComponentSwizzle::eA: return "A";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorType value)
  {
    switch (value)
    {
    case DescriptorType::eSampler: return "Sampler";
    case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
    case DescriptorType::eSampledImage: return "SampledImage";
    case DescriptorType::eStorageImage: return "StorageImage";
    case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
    case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
    case DescriptorType::eUniformBuffer: return "UniformBuffer";
    case DescriptorType::eStorageBuffer: return "StorageBuffer";
    case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
    case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
    case DescriptorType::eInputAttachment: return "InputAttachment";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueryType value)
  {
    switch (value)
    {
    case QueryType::eOcclusion: return "Occlusion";
    case QueryType::ePipelineStatistics: return "PipelineStatistics";
    case QueryType::eTimestamp: return "Timestamp";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(BorderColor value)
  {
    switch (value)
    {
    case BorderColor::eFloatTransparentBlack: return "FloatTransparentBlack";
    case BorderColor::eIntTransparentBlack: return "IntTransparentBlack";
    case BorderColor::eFloatOpaqueBlack: return "FloatOpaqueBlack";
    case BorderColor::eIntOpaqueBlack: return "IntOpaqueBlack";
    case BorderColor::eFloatOpaqueWhite: return "FloatOpaqueWhite";
    case BorderColor::eIntOpaqueWhite: return "IntOpaqueWhite";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineBindPoint value)
  {
    switch (value)
    {
    case PipelineBindPoint::eGraphics: return "Graphics";
    case PipelineBindPoint::eCompute: return "Compute";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineCacheHeaderVersion value)
  {
    switch (value)
    {
    case PipelineCacheHeaderVersion::eOne: return "One";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(PrimitiveTopology value)
  {
    switch (value)
    {
    case PrimitiveTopology::ePointList: return "PointList";
    case PrimitiveTopology::eLineList: return "LineList";
    case PrimitiveTopology::eLineStrip: return "LineStrip";
    case PrimitiveTopology::eTriangleList: return "TriangleList";
    case PrimitiveTopology::eTriangleStrip: return "TriangleStrip";
    case PrimitiveTopology::eTriangleFan: return "TriangleFan";
    case PrimitiveTopology::eLineListWithAdjacency: return "LineListWithAdjacency";
    case PrimitiveTopology::eLineStripWithAdjacency: return "LineStripWithAdjacency";
    case PrimitiveTopology::eTriangleListWithAdjacency: return "TriangleListWithAdjacency";
    case PrimitiveTopology::eTriangleStripWithAdjacency: return "TriangleStripWithAdjacency";
    case PrimitiveTopology::ePatchList: return "PatchList";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SharingMode value)
  {
    switch (value)
    {
    case SharingMode::eExclusive: return "Exclusive";
    case SharingMode::eConcurrent: return "Concurrent";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(IndexType value)
  {
    switch (value)
    {
    case IndexType::eUint16: return "Uint16";
    case IndexType::eUint32: return "Uint32";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(Filter value)
  {
    switch (value)
    {
    case Filter::eNearest: return "Nearest";
    case Filter::eLinear: return "Linear";
    case Filter::eCubicIMG: return "CubicIMG";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SamplerMipmapMode value)
  {
    switch (value)
    {
    case SamplerMipmapMode::eNearest: return "Nearest";
    case SamplerMipmapMode::eLinear: return "Linear";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SamplerAddressMode value)
  {
    switch (value)
    {
    case SamplerAddressMode::eRepeat: return "Repeat";
    case SamplerAddressMode::eMirroredRepeat: return "MirroredRepeat";
    case SamplerAddressMode::eClampToEdge: return "ClampToEdge";
    case SamplerAddressMode::eClampToBorder: return "ClampToBorder";
    case SamplerAddressMode::eMirrorClampToEdge: return "MirrorClampToEdge";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CompareOp value)
  {
    switch (value)
    {
    case CompareOp::eNever: return "Never";
    case CompareOp::eLess: return "Less";
    case CompareOp::eEqual: return "Equal";
    case CompareOp::eLessOrEqual: return "LessOrEqual";
    case CompareOp::eGreater: return "Greater";
    case CompareOp::eNotEqual: return "NotEqual";
    case CompareOp::eGreaterOrEqual: return "GreaterOrEqual";
    case CompareOp::eAlways: return "Always";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(PolygonMode value)
  {
    switch (value)
    {
    case PolygonMode::eFill: return "Fill";
    case PolygonMode::eLine: return "Line";
    case PolygonMode::ePoint: return "Point";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CullModeFlagBits value)
  {
    switch (value)
    {
    case CullModeFlagBits::eNone: return "None";
    case CullModeFlagBits::eFront: return "Front";
    case CullModeFlagBits::eBack: return "Back";
    case CullModeFlagBits::eFrontAndBack: return "FrontAndBack";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CullModeFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CullModeFlagBits::eNone) result += "None | ";
    if (value & CullModeFlagBits::eFront) result += "Front | ";
    if (value & CullModeFlagBits::eBack) result += "Back | ";
    if (value & CullModeFlagBits::eFrontAndBack) result += "FrontAndBack | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

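  // Usage sketch (illustrative only, not generated code): flag types that do define
  // bits, such as CullModeFlags above, list every set bit separated by " | ". Note
  // that eFrontAndBack is the union of eFront and eBack, so it is reported whenever
  // either of those bits is present.
  //
  //   vk::CullModeFlags cull = vk::CullModeFlagBits::eFront | vk::CullModeFlagBits::eBack;
  //   std::string text = vk::to_string( cull );   // "{Front | Back | FrontAndBack}"
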
  VULKAN_HPP_INLINE std::string to_string(FrontFace value)
  {
    switch (value)
    {
    case FrontFace::eCounterClockwise: return "CounterClockwise";
    case FrontFace::eClockwise: return "Clockwise";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(BlendFactor value)
  {
    switch (value)
    {
    case BlendFactor::eZero: return "Zero";
    case BlendFactor::eOne: return "One";
    case BlendFactor::eSrcColor: return "SrcColor";
    case BlendFactor::eOneMinusSrcColor: return "OneMinusSrcColor";
    case BlendFactor::eDstColor: return "DstColor";
    case BlendFactor::eOneMinusDstColor: return "OneMinusDstColor";
    case BlendFactor::eSrcAlpha: return "SrcAlpha";
    case BlendFactor::eOneMinusSrcAlpha: return "OneMinusSrcAlpha";
    case BlendFactor::eDstAlpha: return "DstAlpha";
    case BlendFactor::eOneMinusDstAlpha: return "OneMinusDstAlpha";
    case BlendFactor::eConstantColor: return "ConstantColor";
    case BlendFactor::eOneMinusConstantColor: return "OneMinusConstantColor";
    case BlendFactor::eConstantAlpha: return "ConstantAlpha";
    case BlendFactor::eOneMinusConstantAlpha: return "OneMinusConstantAlpha";
    case BlendFactor::eSrcAlphaSaturate: return "SrcAlphaSaturate";
    case BlendFactor::eSrc1Color: return "Src1Color";
    case BlendFactor::eOneMinusSrc1Color: return "OneMinusSrc1Color";
    case BlendFactor::eSrc1Alpha: return "Src1Alpha";
    case BlendFactor::eOneMinusSrc1Alpha: return "OneMinusSrc1Alpha";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(BlendOp value)
  {
    switch (value)
    {
    case BlendOp::eAdd: return "Add";
    case BlendOp::eSubtract: return "Subtract";
    case BlendOp::eReverseSubtract: return "ReverseSubtract";
    case BlendOp::eMin: return "Min";
    case BlendOp::eMax: return "Max";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(StencilOp value)
  {
    switch (value)
    {
    case StencilOp::eKeep: return "Keep";
    case StencilOp::eZero: return "Zero";
    case StencilOp::eReplace: return "Replace";
    case StencilOp::eIncrementAndClamp: return "IncrementAndClamp";
    case StencilOp::eDecrementAndClamp: return "DecrementAndClamp";
    case StencilOp::eInvert: return "Invert";
    case StencilOp::eIncrementAndWrap: return "IncrementAndWrap";
    case StencilOp::eDecrementAndWrap: return "DecrementAndWrap";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(LogicOp value)
  {
    switch (value)
    {
    case LogicOp::eClear: return "Clear";
    case LogicOp::eAnd: return "And";
    case LogicOp::eAndReverse: return "AndReverse";
    case LogicOp::eCopy: return "Copy";
    case LogicOp::eAndInverted: return "AndInverted";
    case LogicOp::eNoOp: return "NoOp";
    case LogicOp::eXor: return "Xor";
    case LogicOp::eOr: return "Or";
    case LogicOp::eNor: return "Nor";
    case LogicOp::eEquivalent: return "Equivalent";
    case LogicOp::eInvert: return "Invert";
    case LogicOp::eOrReverse: return "OrReverse";
    case LogicOp::eCopyInverted: return "CopyInverted";
    case LogicOp::eOrInverted: return "OrInverted";
    case LogicOp::eNand: return "Nand";
    case LogicOp::eSet: return "Set";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(InternalAllocationType value)
  {
    switch (value)
    {
    case InternalAllocationType::eExecutable: return "Executable";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SystemAllocationScope value)
  {
    switch (value)
    {
    case SystemAllocationScope::eCommand: return "Command";
    case SystemAllocationScope::eObject: return "Object";
    case SystemAllocationScope::eCache: return "Cache";
    case SystemAllocationScope::eDevice: return "Device";
    case SystemAllocationScope::eInstance: return "Instance";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(PhysicalDeviceType value)
  {
    switch (value)
    {
    case PhysicalDeviceType::eOther: return "Other";
    case PhysicalDeviceType::eIntegratedGpu: return "IntegratedGpu";
    case PhysicalDeviceType::eDiscreteGpu: return "DiscreteGpu";
    case PhysicalDeviceType::eVirtualGpu: return "VirtualGpu";
    case PhysicalDeviceType::eCpu: return "Cpu";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(VertexInputRate value)
  {
    switch (value)
    {
    case VertexInputRate::eVertex: return "Vertex";
    case VertexInputRate::eInstance: return "Instance";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(Format value)
  {
    switch (value)
    {
    case Format::eUndefined: return "Undefined";
    case Format::eR4G4UnormPack8: return "R4G4UnormPack8";
    case Format::eR4G4B4A4UnormPack16: return "R4G4B4A4UnormPack16";
    case Format::eB4G4R4A4UnormPack16: return "B4G4R4A4UnormPack16";
    case Format::eR5G6B5UnormPack16: return "R5G6B5UnormPack16";
    case Format::eB5G6R5UnormPack16: return "B5G6R5UnormPack16";
    case Format::eR5G5B5A1UnormPack16: return "R5G5B5A1UnormPack16";
    case Format::eB5G5R5A1UnormPack16: return "B5G5R5A1UnormPack16";
    case Format::eA1R5G5B5UnormPack16: return "A1R5G5B5UnormPack16";
    case Format::eR8Unorm: return "R8Unorm";
    case Format::eR8Snorm: return "R8Snorm";
    case Format::eR8Uscaled: return "R8Uscaled";
    case Format::eR8Sscaled: return "R8Sscaled";
    case Format::eR8Uint: return "R8Uint";
    case Format::eR8Sint: return "R8Sint";
    case Format::eR8Srgb: return "R8Srgb";
    case Format::eR8G8Unorm: return "R8G8Unorm";
    case Format::eR8G8Snorm: return "R8G8Snorm";
    case Format::eR8G8Uscaled: return "R8G8Uscaled";
    case Format::eR8G8Sscaled: return "R8G8Sscaled";
    case Format::eR8G8Uint: return "R8G8Uint";
    case Format::eR8G8Sint: return "R8G8Sint";
    case Format::eR8G8Srgb: return "R8G8Srgb";
    case Format::eR8G8B8Unorm: return "R8G8B8Unorm";
    case Format::eR8G8B8Snorm: return "R8G8B8Snorm";
    case Format::eR8G8B8Uscaled: return "R8G8B8Uscaled";
    case Format::eR8G8B8Sscaled: return "R8G8B8Sscaled";
    case Format::eR8G8B8Uint: return "R8G8B8Uint";
    case Format::eR8G8B8Sint: return "R8G8B8Sint";
    case Format::eR8G8B8Srgb: return "R8G8B8Srgb";
    case Format::eB8G8R8Unorm: return "B8G8R8Unorm";
    case Format::eB8G8R8Snorm: return "B8G8R8Snorm";
    case Format::eB8G8R8Uscaled: return "B8G8R8Uscaled";
    case Format::eB8G8R8Sscaled: return "B8G8R8Sscaled";
    case Format::eB8G8R8Uint: return "B8G8R8Uint";
    case Format::eB8G8R8Sint: return "B8G8R8Sint";
    case Format::eB8G8R8Srgb: return "B8G8R8Srgb";
    case Format::eR8G8B8A8Unorm: return "R8G8B8A8Unorm";
    case Format::eR8G8B8A8Snorm: return "R8G8B8A8Snorm";
    case Format::eR8G8B8A8Uscaled: return "R8G8B8A8Uscaled";
    case Format::eR8G8B8A8Sscaled: return "R8G8B8A8Sscaled";
    case Format::eR8G8B8A8Uint: return "R8G8B8A8Uint";
    case Format::eR8G8B8A8Sint: return "R8G8B8A8Sint";
    case Format::eR8G8B8A8Srgb: return "R8G8B8A8Srgb";
    case Format::eB8G8R8A8Unorm: return "B8G8R8A8Unorm";
    case Format::eB8G8R8A8Snorm: return "B8G8R8A8Snorm";
    case Format::eB8G8R8A8Uscaled: return "B8G8R8A8Uscaled";
    case Format::eB8G8R8A8Sscaled: return "B8G8R8A8Sscaled";
    case Format::eB8G8R8A8Uint: return "B8G8R8A8Uint";
    case Format::eB8G8R8A8Sint: return "B8G8R8A8Sint";
    case Format::eB8G8R8A8Srgb: return "B8G8R8A8Srgb";
    case Format::eA8B8G8R8UnormPack32: return "A8B8G8R8UnormPack32";
    case Format::eA8B8G8R8SnormPack32: return "A8B8G8R8SnormPack32";
    case Format::eA8B8G8R8UscaledPack32: return "A8B8G8R8UscaledPack32";
    case Format::eA8B8G8R8SscaledPack32: return "A8B8G8R8SscaledPack32";
    case Format::eA8B8G8R8UintPack32: return "A8B8G8R8UintPack32";
    case Format::eA8B8G8R8SintPack32: return "A8B8G8R8SintPack32";
    case Format::eA8B8G8R8SrgbPack32: return "A8B8G8R8SrgbPack32";
    case Format::eA2R10G10B10UnormPack32: return "A2R10G10B10UnormPack32";
    case Format::eA2R10G10B10SnormPack32: return "A2R10G10B10SnormPack32";
    case Format::eA2R10G10B10UscaledPack32: return "A2R10G10B10UscaledPack32";
    case Format::eA2R10G10B10SscaledPack32: return "A2R10G10B10SscaledPack32";
    case Format::eA2R10G10B10UintPack32: return "A2R10G10B10UintPack32";
    case Format::eA2R10G10B10SintPack32: return "A2R10G10B10SintPack32";
    case Format::eA2B10G10R10UnormPack32: return "A2B10G10R10UnormPack32";
    case Format::eA2B10G10R10SnormPack32: return "A2B10G10R10SnormPack32";
    case Format::eA2B10G10R10UscaledPack32: return "A2B10G10R10UscaledPack32";
    case Format::eA2B10G10R10SscaledPack32: return "A2B10G10R10SscaledPack32";
    case Format::eA2B10G10R10UintPack32: return "A2B10G10R10UintPack32";
    case Format::eA2B10G10R10SintPack32: return "A2B10G10R10SintPack32";
    case Format::eR16Unorm: return "R16Unorm";
    case Format::eR16Snorm: return "R16Snorm";
    case Format::eR16Uscaled: return "R16Uscaled";
    case Format::eR16Sscaled: return "R16Sscaled";
    case Format::eR16Uint: return "R16Uint";
    case Format::eR16Sint: return "R16Sint";
    case Format::eR16Sfloat: return "R16Sfloat";
    case Format::eR16G16Unorm: return "R16G16Unorm";
    case Format::eR16G16Snorm: return "R16G16Snorm";
    case Format::eR16G16Uscaled: return "R16G16Uscaled";
    case Format::eR16G16Sscaled: return "R16G16Sscaled";
    case Format::eR16G16Uint: return "R16G16Uint";
    case Format::eR16G16Sint: return "R16G16Sint";
    case Format::eR16G16Sfloat: return "R16G16Sfloat";
    case Format::eR16G16B16Unorm: return "R16G16B16Unorm";
    case Format::eR16G16B16Snorm: return "R16G16B16Snorm";
    case Format::eR16G16B16Uscaled: return "R16G16B16Uscaled";
    case Format::eR16G16B16Sscaled: return "R16G16B16Sscaled";
    case Format::eR16G16B16Uint: return "R16G16B16Uint";
    case Format::eR16G16B16Sint: return "R16G16B16Sint";
    case Format::eR16G16B16Sfloat: return "R16G16B16Sfloat";
    case Format::eR16G16B16A16Unorm: return "R16G16B16A16Unorm";
    case Format::eR16G16B16A16Snorm: return "R16G16B16A16Snorm";
    case Format::eR16G16B16A16Uscaled: return "R16G16B16A16Uscaled";
    case Format::eR16G16B16A16Sscaled: return "R16G16B16A16Sscaled";
    case Format::eR16G16B16A16Uint: return "R16G16B16A16Uint";
    case Format::eR16G16B16A16Sint: return "R16G16B16A16Sint";
    case Format::eR16G16B16A16Sfloat: return "R16G16B16A16Sfloat";
    case Format::eR32Uint: return "R32Uint";
    case Format::eR32Sint: return "R32Sint";
    case Format::eR32Sfloat: return "R32Sfloat";
    case Format::eR32G32Uint: return "R32G32Uint";
    case Format::eR32G32Sint: return "R32G32Sint";
    case Format::eR32G32Sfloat: return "R32G32Sfloat";
    case Format::eR32G32B32Uint: return "R32G32B32Uint";
    case Format::eR32G32B32Sint: return "R32G32B32Sint";
    case Format::eR32G32B32Sfloat: return "R32G32B32Sfloat";
    case Format::eR32G32B32A32Uint: return "R32G32B32A32Uint";
    case Format::eR32G32B32A32Sint: return "R32G32B32A32Sint";
    case Format::eR32G32B32A32Sfloat: return "R32G32B32A32Sfloat";
    case Format::eR64Uint: return "R64Uint";
    case Format::eR64Sint: return "R64Sint";
    case Format::eR64Sfloat: return "R64Sfloat";
    case Format::eR64G64Uint: return "R64G64Uint";
    case Format::eR64G64Sint: return "R64G64Sint";
    case Format::eR64G64Sfloat: return "R64G64Sfloat";
    case Format::eR64G64B64Uint: return "R64G64B64Uint";
    case Format::eR64G64B64Sint: return "R64G64B64Sint";
    case Format::eR64G64B64Sfloat: return "R64G64B64Sfloat";
    case Format::eR64G64B64A64Uint: return "R64G64B64A64Uint";
    case Format::eR64G64B64A64Sint: return "R64G64B64A64Sint";
    case Format::eR64G64B64A64Sfloat: return "R64G64B64A64Sfloat";
    case Format::eB10G11R11UfloatPack32: return "B10G11R11UfloatPack32";
    case Format::eE5B9G9R9UfloatPack32: return "E5B9G9R9UfloatPack32";
    case Format::eD16Unorm: return "D16Unorm";
    case Format::eX8D24UnormPack32: return "X8D24UnormPack32";
    case Format::eD32Sfloat: return "D32Sfloat";
    case Format::eS8Uint: return "S8Uint";
    case Format::eD16UnormS8Uint: return "D16UnormS8Uint";
    case Format::eD24UnormS8Uint: return "D24UnormS8Uint";
    case Format::eD32SfloatS8Uint: return "D32SfloatS8Uint";
    case Format::eBc1RgbUnormBlock: return "Bc1RgbUnormBlock";
    case Format::eBc1RgbSrgbBlock: return "Bc1RgbSrgbBlock";
    case Format::eBc1RgbaUnormBlock: return "Bc1RgbaUnormBlock";
    case Format::eBc1RgbaSrgbBlock: return "Bc1RgbaSrgbBlock";
    case Format::eBc2UnormBlock: return "Bc2UnormBlock";
    case Format::eBc2SrgbBlock: return "Bc2SrgbBlock";
    case Format::eBc3UnormBlock: return "Bc3UnormBlock";
    case Format::eBc3SrgbBlock: return "Bc3SrgbBlock";
    case Format::eBc4UnormBlock: return "Bc4UnormBlock";
    case Format::eBc4SnormBlock: return "Bc4SnormBlock";
    case Format::eBc5UnormBlock: return "Bc5UnormBlock";
    case Format::eBc5SnormBlock: return "Bc5SnormBlock";
    case Format::eBc6HUfloatBlock: return "Bc6HUfloatBlock";
    case Format::eBc6HSfloatBlock: return "Bc6HSfloatBlock";
    case Format::eBc7UnormBlock: return "Bc7UnormBlock";
    case Format::eBc7SrgbBlock: return "Bc7SrgbBlock";
    case Format::eEtc2R8G8B8UnormBlock: return "Etc2R8G8B8UnormBlock";
    case Format::eEtc2R8G8B8SrgbBlock: return "Etc2R8G8B8SrgbBlock";
    case Format::eEtc2R8G8B8A1UnormBlock: return "Etc2R8G8B8A1UnormBlock";
    case Format::eEtc2R8G8B8A1SrgbBlock: return "Etc2R8G8B8A1SrgbBlock";
    case Format::eEtc2R8G8B8A8UnormBlock: return "Etc2R8G8B8A8UnormBlock";
    case Format::eEtc2R8G8B8A8SrgbBlock: return "Etc2R8G8B8A8SrgbBlock";
    case Format::eEacR11UnormBlock: return "EacR11UnormBlock";
    case Format::eEacR11SnormBlock: return "EacR11SnormBlock";
    case Format::eEacR11G11UnormBlock: return "EacR11G11UnormBlock";
    case Format::eEacR11G11SnormBlock: return "EacR11G11SnormBlock";
    case Format::eAstc4x4UnormBlock: return "Astc4x4UnormBlock";
    case Format::eAstc4x4SrgbBlock: return "Astc4x4SrgbBlock";
    case Format::eAstc5x4UnormBlock: return "Astc5x4UnormBlock";
    case Format::eAstc5x4SrgbBlock: return "Astc5x4SrgbBlock";
    case Format::eAstc5x5UnormBlock: return "Astc5x5UnormBlock";
    case Format::eAstc5x5SrgbBlock: return "Astc5x5SrgbBlock";
    case Format::eAstc6x5UnormBlock: return "Astc6x5UnormBlock";
    case Format::eAstc6x5SrgbBlock: return "Astc6x5SrgbBlock";
    case Format::eAstc6x6UnormBlock: return "Astc6x6UnormBlock";
    case Format::eAstc6x6SrgbBlock: return "Astc6x6SrgbBlock";
    case Format::eAstc8x5UnormBlock: return "Astc8x5UnormBlock";
    case Format::eAstc8x5SrgbBlock: return "Astc8x5SrgbBlock";
    case Format::eAstc8x6UnormBlock: return "Astc8x6UnormBlock";
    case Format::eAstc8x6SrgbBlock: return "Astc8x6SrgbBlock";
    case Format::eAstc8x8UnormBlock: return "Astc8x8UnormBlock";
    case Format::eAstc8x8SrgbBlock: return "Astc8x8SrgbBlock";
    case Format::eAstc10x5UnormBlock: return "Astc10x5UnormBlock";
    case Format::eAstc10x5SrgbBlock: return "Astc10x5SrgbBlock";
    case Format::eAstc10x6UnormBlock: return "Astc10x6UnormBlock";
    case Format::eAstc10x6SrgbBlock: return "Astc10x6SrgbBlock";
    case Format::eAstc10x8UnormBlock: return "Astc10x8UnormBlock";
    case Format::eAstc10x8SrgbBlock: return "Astc10x8SrgbBlock";
    case Format::eAstc10x10UnormBlock: return "Astc10x10UnormBlock";
    case Format::eAstc10x10SrgbBlock: return "Astc10x10SrgbBlock";
    case Format::eAstc12x10UnormBlock: return "Astc12x10UnormBlock";
    case Format::eAstc12x10SrgbBlock: return "Astc12x10SrgbBlock";
    case Format::eAstc12x12UnormBlock: return "Astc12x12UnormBlock";
    case Format::eAstc12x12SrgbBlock: return "Astc12x12SrgbBlock";
    case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG";
    case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG";
    case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG";
    case Format::ePvrtc24BppUnormBlockIMG: return "Pvrtc24BppUnormBlockIMG";
    case Format::ePvrtc12BppSrgbBlockIMG: return "Pvrtc12BppSrgbBlockIMG";
    case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG";
    case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG";
    case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(StructureType value)
  {
    switch (value)
    {
    case StructureType::eApplicationInfo: return "ApplicationInfo";
    case StructureType::eInstanceCreateInfo: return "InstanceCreateInfo";
    case StructureType::eDeviceQueueCreateInfo: return "DeviceQueueCreateInfo";
    case StructureType::eDeviceCreateInfo: return "DeviceCreateInfo";
    case StructureType::eSubmitInfo: return "SubmitInfo";
    case StructureType::eMemoryAllocateInfo: return "MemoryAllocateInfo";
    case StructureType::eMappedMemoryRange: return "MappedMemoryRange";
    case StructureType::eBindSparseInfo: return "BindSparseInfo";
    case StructureType::eFenceCreateInfo: return "FenceCreateInfo";
    case StructureType::eSemaphoreCreateInfo: return "SemaphoreCreateInfo";
    case StructureType::eEventCreateInfo: return "EventCreateInfo";
    case StructureType::eQueryPoolCreateInfo: return "QueryPoolCreateInfo";
    case StructureType::eBufferCreateInfo: return "BufferCreateInfo";
    case StructureType::eBufferViewCreateInfo: return "BufferViewCreateInfo";
    case StructureType::eImageCreateInfo: return "ImageCreateInfo";
    case StructureType::eImageViewCreateInfo: return "ImageViewCreateInfo";
    case StructureType::eShaderModuleCreateInfo: return "ShaderModuleCreateInfo";
    case StructureType::ePipelineCacheCreateInfo: return "PipelineCacheCreateInfo";
    case StructureType::ePipelineShaderStageCreateInfo: return "PipelineShaderStageCreateInfo";
    case StructureType::ePipelineVertexInputStateCreateInfo: return "PipelineVertexInputStateCreateInfo";
    case StructureType::ePipelineInputAssemblyStateCreateInfo: return "PipelineInputAssemblyStateCreateInfo";
    case StructureType::ePipelineTessellationStateCreateInfo: return "PipelineTessellationStateCreateInfo";
    case StructureType::ePipelineViewportStateCreateInfo: return "PipelineViewportStateCreateInfo";
    case StructureType::ePipelineRasterizationStateCreateInfo: return "PipelineRasterizationStateCreateInfo";
    case StructureType::ePipelineMultisampleStateCreateInfo: return "PipelineMultisampleStateCreateInfo";
    case StructureType::ePipelineDepthStencilStateCreateInfo: return "PipelineDepthStencilStateCreateInfo";
    case StructureType::ePipelineColorBlendStateCreateInfo: return "PipelineColorBlendStateCreateInfo";
    case StructureType::ePipelineDynamicStateCreateInfo: return "PipelineDynamicStateCreateInfo";
    case StructureType::eGraphicsPipelineCreateInfo: return "GraphicsPipelineCreateInfo";
    case StructureType::eComputePipelineCreateInfo: return "ComputePipelineCreateInfo";
    case StructureType::ePipelineLayoutCreateInfo: return "PipelineLayoutCreateInfo";
    case StructureType::eSamplerCreateInfo: return "SamplerCreateInfo";
    case StructureType::eDescriptorSetLayoutCreateInfo: return "DescriptorSetLayoutCreateInfo";
    case StructureType::eDescriptorPoolCreateInfo: return "DescriptorPoolCreateInfo";
    case StructureType::eDescriptorSetAllocateInfo: return "DescriptorSetAllocateInfo";
    case StructureType::eWriteDescriptorSet: return "WriteDescriptorSet";
    case StructureType::eCopyDescriptorSet: return "CopyDescriptorSet";
    case StructureType::eFramebufferCreateInfo: return "FramebufferCreateInfo";
    case StructureType::eRenderPassCreateInfo: return "RenderPassCreateInfo";
    case StructureType::eCommandPoolCreateInfo: return "CommandPoolCreateInfo";
    case StructureType::eCommandBufferAllocateInfo: return "CommandBufferAllocateInfo";
    case StructureType::eCommandBufferInheritanceInfo: return "CommandBufferInheritanceInfo";
    case StructureType::eCommandBufferBeginInfo: return "CommandBufferBeginInfo";
    case StructureType::eRenderPassBeginInfo: return "RenderPassBeginInfo";
    case StructureType::eBufferMemoryBarrier: return "BufferMemoryBarrier";
    case StructureType::eImageMemoryBarrier: return "ImageMemoryBarrier";
    case StructureType::eMemoryBarrier: return "MemoryBarrier";
    case StructureType::eLoaderInstanceCreateInfo: return "LoaderInstanceCreateInfo";
    case StructureType::eLoaderDeviceCreateInfo: return "LoaderDeviceCreateInfo";
    case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR";
    case StructureType::ePresentInfoKHR: return "PresentInfoKHR";
    case StructureType::eDisplayModeCreateInfoKHR: return "DisplayModeCreateInfoKHR";
    case StructureType::eDisplaySurfaceCreateInfoKHR: return "DisplaySurfaceCreateInfoKHR";
    case StructureType::eDisplayPresentInfoKHR: return "DisplayPresentInfoKHR";
    case StructureType::eXlibSurfaceCreateInfoKHR: return "XlibSurfaceCreateInfoKHR";
    case StructureType::eXcbSurfaceCreateInfoKHR: return "XcbSurfaceCreateInfoKHR";
    case StructureType::eWaylandSurfaceCreateInfoKHR: return "WaylandSurfaceCreateInfoKHR";
    case StructureType::eMirSurfaceCreateInfoKHR: return "MirSurfaceCreateInfoKHR";
    case StructureType::eAndroidSurfaceCreateInfoKHR: return "AndroidSurfaceCreateInfoKHR";
    case StructureType::eWin32SurfaceCreateInfoKHR: return "Win32SurfaceCreateInfoKHR";
    case StructureType::eDebugReportCallbackCreateInfoEXT: return "DebugReportCallbackCreateInfoEXT";
    case StructureType::ePipelineRasterizationStateRasterizationOrderAMD: return "PipelineRasterizationStateRasterizationOrderAMD";
    case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT";
    case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT";
    case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT";
    case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV";
    case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV";
    case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV";
    case StructureType::eExternalMemoryImageCreateInfoNV: return "ExternalMemoryImageCreateInfoNV";
    case StructureType::eExportMemoryAllocateInfoNV: return "ExportMemoryAllocateInfoNV";
    case StructureType::eImportMemoryWin32HandleInfoNV: return "ImportMemoryWin32HandleInfoNV";
    case StructureType::eExportMemoryWin32HandleInfoNV: return "ExportMemoryWin32HandleInfoNV";
    case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV: return "Win32KeyedMutexAcquireReleaseInfoNV";
    case StructureType::ePhysicalDeviceFeatures2KHR: return "PhysicalDeviceFeatures2KHR";
    case StructureType::ePhysicalDeviceProperties2KHR: return "PhysicalDeviceProperties2KHR";
    case StructureType::eFormatProperties2KHR: return "FormatProperties2KHR";
    case StructureType::eImageFormatProperties2KHR: return "ImageFormatProperties2KHR";
    case StructureType::ePhysicalDeviceImageFormatInfo2KHR: return "PhysicalDeviceImageFormatInfo2KHR";
    case StructureType::eQueueFamilyProperties2KHR: return "QueueFamilyProperties2KHR";
    case StructureType::ePhysicalDeviceMemoryProperties2KHR: return "PhysicalDeviceMemoryProperties2KHR";
    case StructureType::eSparseImageFormatProperties2KHR: return "SparseImageFormatProperties2KHR";
    case StructureType::ePhysicalDeviceSparseImageFormatInfo2KHR: return "PhysicalDeviceSparseImageFormatInfo2KHR";
    case StructureType::eValidationFlagsEXT: return "ValidationFlagsEXT";
    case StructureType::eViSurfaceCreateInfoNN: return "ViSurfaceCreateInfoNN";
    case StructureType::eObjectTableCreateInfoNVX: return "ObjectTableCreateInfoNVX";
23764 case StructureType::eIndirectCommandsLayoutCreateInfoNVX: return "IndirectCommandsLayoutCreateInfoNVX";
23765 case StructureType::eCmdProcessCommandsInfoNVX: return "CmdProcessCommandsInfoNVX";
23766 case StructureType::eCmdReserveSpaceForCommandsInfoNVX: return "CmdReserveSpaceForCommandsInfoNVX";
23767 case StructureType::eDeviceGeneratedCommandsLimitsNVX: return "DeviceGeneratedCommandsLimitsNVX";
23768 case StructureType::eDeviceGeneratedCommandsFeaturesNVX: return "DeviceGeneratedCommandsFeaturesNVX";
Mark Young39389872017-01-19 21:10:49 -070023769 case StructureType::eSurfaceCapabilities2EXT: return "SurfaceCapabilities2EXT";
23770 case StructureType::eDisplayPowerInfoEXT: return "DisplayPowerInfoEXT";
23771 case StructureType::eDeviceEventInfoEXT: return "DeviceEventInfoEXT";
23772 case StructureType::eDisplayEventInfoEXT: return "DisplayEventInfoEXT";
23773 case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT";
Lenny Komowbed9b5c2016-08-11 11:23:15 -060023774 default: return "invalid";
23775 }
23776 }
23777
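  // The to_string overloads for plain enum types, like to_string(StructureType) above,
  // switch on the value and return the enumerator name without its leading 'e';
  // unrecognized values map to "invalid". A minimal usage sketch (assuming the
  // enclosing vk namespace used throughout this header):
  //
  //   std::string name = vk::to_string(vk::StructureType::eImageCreateInfo); // "ImageCreateInfo"
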
  VULKAN_HPP_INLINE std::string to_string(SubpassContents value)
  {
    switch (value)
    {
    case SubpassContents::eInline: return "Inline";
    case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DynamicState value)
  {
    switch (value)
    {
    case DynamicState::eViewport: return "Viewport";
    case DynamicState::eScissor: return "Scissor";
    case DynamicState::eLineWidth: return "LineWidth";
    case DynamicState::eDepthBias: return "DepthBias";
    case DynamicState::eBlendConstants: return "BlendConstants";
    case DynamicState::eDepthBounds: return "DepthBounds";
    case DynamicState::eStencilCompareMask: return "StencilCompareMask";
    case DynamicState::eStencilWriteMask: return "StencilWriteMask";
    case DynamicState::eStencilReference: return "StencilReference";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueueFlagBits value)
  {
    switch (value)
    {
    case QueueFlagBits::eGraphics: return "Graphics";
    case QueueFlagBits::eCompute: return "Compute";
    case QueueFlagBits::eTransfer: return "Transfer";
    case QueueFlagBits::eSparseBinding: return "SparseBinding";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueueFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & QueueFlagBits::eGraphics) result += "Graphics | ";
    if (value & QueueFlagBits::eCompute) result += "Compute | ";
    if (value & QueueFlagBits::eTransfer) result += "Transfer | ";
    if (value & QueueFlagBits::eSparseBinding) result += "SparseBinding | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

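  // The to_string overloads for Flags (bitmask) types, like to_string(QueueFlags) above,
  // append "Name | " for each bit that is set and then trim the trailing separator,
  // so a populated mask prints as "{A | B}" and an empty mask prints as "{}".
  // A minimal usage sketch (assuming the enclosing vk namespace):
  //
  //   vk::QueueFlags flags = vk::QueueFlagBits::eGraphics | vk::QueueFlagBits::eCompute;
  //   std::string s = vk::to_string(flags); // "{Graphics | Compute}"
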
  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlagBits value)
  {
    switch (value)
    {
    case MemoryPropertyFlagBits::eDeviceLocal: return "DeviceLocal";
    case MemoryPropertyFlagBits::eHostVisible: return "HostVisible";
    case MemoryPropertyFlagBits::eHostCoherent: return "HostCoherent";
    case MemoryPropertyFlagBits::eHostCached: return "HostCached";
    case MemoryPropertyFlagBits::eLazilyAllocated: return "LazilyAllocated";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(MemoryPropertyFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & MemoryPropertyFlagBits::eDeviceLocal) result += "DeviceLocal | ";
    if (value & MemoryPropertyFlagBits::eHostVisible) result += "HostVisible | ";
    if (value & MemoryPropertyFlagBits::eHostCoherent) result += "HostCoherent | ";
    if (value & MemoryPropertyFlagBits::eHostCached) result += "HostCached | ";
    if (value & MemoryPropertyFlagBits::eLazilyAllocated) result += "LazilyAllocated | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlagBits value)
  {
    switch (value)
    {
    case MemoryHeapFlagBits::eDeviceLocal: return "DeviceLocal";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(MemoryHeapFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & MemoryHeapFlagBits::eDeviceLocal) result += "DeviceLocal | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(AccessFlagBits value)
  {
    switch (value)
    {
    case AccessFlagBits::eIndirectCommandRead: return "IndirectCommandRead";
    case AccessFlagBits::eIndexRead: return "IndexRead";
    case AccessFlagBits::eVertexAttributeRead: return "VertexAttributeRead";
    case AccessFlagBits::eUniformRead: return "UniformRead";
    case AccessFlagBits::eInputAttachmentRead: return "InputAttachmentRead";
    case AccessFlagBits::eShaderRead: return "ShaderRead";
    case AccessFlagBits::eShaderWrite: return "ShaderWrite";
    case AccessFlagBits::eColorAttachmentRead: return "ColorAttachmentRead";
    case AccessFlagBits::eColorAttachmentWrite: return "ColorAttachmentWrite";
    case AccessFlagBits::eDepthStencilAttachmentRead: return "DepthStencilAttachmentRead";
    case AccessFlagBits::eDepthStencilAttachmentWrite: return "DepthStencilAttachmentWrite";
    case AccessFlagBits::eTransferRead: return "TransferRead";
    case AccessFlagBits::eTransferWrite: return "TransferWrite";
    case AccessFlagBits::eHostRead: return "HostRead";
    case AccessFlagBits::eHostWrite: return "HostWrite";
    case AccessFlagBits::eMemoryRead: return "MemoryRead";
    case AccessFlagBits::eMemoryWrite: return "MemoryWrite";
    case AccessFlagBits::eCommandProcessReadNVX: return "CommandProcessReadNVX";
    case AccessFlagBits::eCommandProcessWriteNVX: return "CommandProcessWriteNVX";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(AccessFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & AccessFlagBits::eIndirectCommandRead) result += "IndirectCommandRead | ";
    if (value & AccessFlagBits::eIndexRead) result += "IndexRead | ";
    if (value & AccessFlagBits::eVertexAttributeRead) result += "VertexAttributeRead | ";
    if (value & AccessFlagBits::eUniformRead) result += "UniformRead | ";
    if (value & AccessFlagBits::eInputAttachmentRead) result += "InputAttachmentRead | ";
    if (value & AccessFlagBits::eShaderRead) result += "ShaderRead | ";
    if (value & AccessFlagBits::eShaderWrite) result += "ShaderWrite | ";
    if (value & AccessFlagBits::eColorAttachmentRead) result += "ColorAttachmentRead | ";
    if (value & AccessFlagBits::eColorAttachmentWrite) result += "ColorAttachmentWrite | ";
    if (value & AccessFlagBits::eDepthStencilAttachmentRead) result += "DepthStencilAttachmentRead | ";
    if (value & AccessFlagBits::eDepthStencilAttachmentWrite) result += "DepthStencilAttachmentWrite | ";
    if (value & AccessFlagBits::eTransferRead) result += "TransferRead | ";
    if (value & AccessFlagBits::eTransferWrite) result += "TransferWrite | ";
    if (value & AccessFlagBits::eHostRead) result += "HostRead | ";
    if (value & AccessFlagBits::eHostWrite) result += "HostWrite | ";
    if (value & AccessFlagBits::eMemoryRead) result += "MemoryRead | ";
    if (value & AccessFlagBits::eMemoryWrite) result += "MemoryWrite | ";
    if (value & AccessFlagBits::eCommandProcessReadNVX) result += "CommandProcessReadNVX | ";
    if (value & AccessFlagBits::eCommandProcessWriteNVX) result += "CommandProcessWriteNVX | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlagBits value)
  {
    switch (value)
    {
    case BufferUsageFlagBits::eTransferSrc: return "TransferSrc";
    case BufferUsageFlagBits::eTransferDst: return "TransferDst";
    case BufferUsageFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
    case BufferUsageFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
    case BufferUsageFlagBits::eUniformBuffer: return "UniformBuffer";
    case BufferUsageFlagBits::eStorageBuffer: return "StorageBuffer";
    case BufferUsageFlagBits::eIndexBuffer: return "IndexBuffer";
    case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer";
    case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(BufferUsageFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & BufferUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
    if (value & BufferUsageFlagBits::eTransferDst) result += "TransferDst | ";
    if (value & BufferUsageFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
    if (value & BufferUsageFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
    if (value & BufferUsageFlagBits::eUniformBuffer) result += "UniformBuffer | ";
    if (value & BufferUsageFlagBits::eStorageBuffer) result += "StorageBuffer | ";
    if (value & BufferUsageFlagBits::eIndexBuffer) result += "IndexBuffer | ";
    if (value & BufferUsageFlagBits::eVertexBuffer) result += "VertexBuffer | ";
    if (value & BufferUsageFlagBits::eIndirectBuffer) result += "IndirectBuffer | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlagBits value)
  {
    switch (value)
    {
    case BufferCreateFlagBits::eSparseBinding: return "SparseBinding";
    case BufferCreateFlagBits::eSparseResidency: return "SparseResidency";
    case BufferCreateFlagBits::eSparseAliased: return "SparseAliased";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(BufferCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & BufferCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
    if (value & BufferCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
    if (value & BufferCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlagBits value)
  {
    switch (value)
    {
    case ShaderStageFlagBits::eVertex: return "Vertex";
    case ShaderStageFlagBits::eTessellationControl: return "TessellationControl";
    case ShaderStageFlagBits::eTessellationEvaluation: return "TessellationEvaluation";
    case ShaderStageFlagBits::eGeometry: return "Geometry";
    case ShaderStageFlagBits::eFragment: return "Fragment";
    case ShaderStageFlagBits::eCompute: return "Compute";
    case ShaderStageFlagBits::eAllGraphics: return "AllGraphics";
    case ShaderStageFlagBits::eAll: return "All";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ShaderStageFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ShaderStageFlagBits::eVertex) result += "Vertex | ";
    if (value & ShaderStageFlagBits::eTessellationControl) result += "TessellationControl | ";
    if (value & ShaderStageFlagBits::eTessellationEvaluation) result += "TessellationEvaluation | ";
    if (value & ShaderStageFlagBits::eGeometry) result += "Geometry | ";
    if (value & ShaderStageFlagBits::eFragment) result += "Fragment | ";
    if (value & ShaderStageFlagBits::eCompute) result += "Compute | ";
    if (value & ShaderStageFlagBits::eAllGraphics) result += "AllGraphics | ";
    if (value & ShaderStageFlagBits::eAll) result += "All | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlagBits value)
  {
    switch (value)
    {
    case ImageUsageFlagBits::eTransferSrc: return "TransferSrc";
    case ImageUsageFlagBits::eTransferDst: return "TransferDst";
    case ImageUsageFlagBits::eSampled: return "Sampled";
    case ImageUsageFlagBits::eStorage: return "Storage";
    case ImageUsageFlagBits::eColorAttachment: return "ColorAttachment";
    case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
    case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment";
    case ImageUsageFlagBits::eInputAttachment: return "InputAttachment";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ImageUsageFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ImageUsageFlagBits::eTransferSrc) result += "TransferSrc | ";
    if (value & ImageUsageFlagBits::eTransferDst) result += "TransferDst | ";
    if (value & ImageUsageFlagBits::eSampled) result += "Sampled | ";
    if (value & ImageUsageFlagBits::eStorage) result += "Storage | ";
    if (value & ImageUsageFlagBits::eColorAttachment) result += "ColorAttachment | ";
    if (value & ImageUsageFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
    if (value & ImageUsageFlagBits::eTransientAttachment) result += "TransientAttachment | ";
    if (value & ImageUsageFlagBits::eInputAttachment) result += "InputAttachment | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlagBits value)
  {
    switch (value)
    {
    case ImageCreateFlagBits::eSparseBinding: return "SparseBinding";
    case ImageCreateFlagBits::eSparseResidency: return "SparseResidency";
    case ImageCreateFlagBits::eSparseAliased: return "SparseAliased";
    case ImageCreateFlagBits::eMutableFormat: return "MutableFormat";
    case ImageCreateFlagBits::eCubeCompatible: return "CubeCompatible";
    case ImageCreateFlagBits::e2DArrayCompatibleKHR: return "2DArrayCompatibleKHR";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ImageCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ImageCreateFlagBits::eSparseBinding) result += "SparseBinding | ";
    if (value & ImageCreateFlagBits::eSparseResidency) result += "SparseResidency | ";
    if (value & ImageCreateFlagBits::eSparseAliased) result += "SparseAliased | ";
    if (value & ImageCreateFlagBits::eMutableFormat) result += "MutableFormat | ";
    if (value & ImageCreateFlagBits::eCubeCompatible) result += "CubeCompatible | ";
    if (value & ImageCreateFlagBits::e2DArrayCompatibleKHR) result += "2DArrayCompatibleKHR | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlagBits value)
  {
    switch (value)
    {
    case PipelineCreateFlagBits::eDisableOptimization: return "DisableOptimization";
    case PipelineCreateFlagBits::eAllowDerivatives: return "AllowDerivatives";
    case PipelineCreateFlagBits::eDerivative: return "Derivative";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & PipelineCreateFlagBits::eDisableOptimization) result += "DisableOptimization | ";
    if (value & PipelineCreateFlagBits::eAllowDerivatives) result += "AllowDerivatives | ";
    if (value & PipelineCreateFlagBits::eDerivative) result += "Derivative | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ColorComponentFlagBits value)
  {
    switch (value)
    {
    case ColorComponentFlagBits::eR: return "R";
    case ColorComponentFlagBits::eG: return "G";
    case ColorComponentFlagBits::eB: return "B";
    case ColorComponentFlagBits::eA: return "A";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ColorComponentFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ColorComponentFlagBits::eR) result += "R | ";
    if (value & ColorComponentFlagBits::eG) result += "G | ";
    if (value & ColorComponentFlagBits::eB) result += "B | ";
    if (value & ColorComponentFlagBits::eA) result += "A | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlagBits value)
  {
    switch (value)
    {
    case FenceCreateFlagBits::eSignaled: return "Signaled";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(FenceCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & FenceCreateFlagBits::eSignaled) result += "Signaled | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlagBits value)
  {
    switch (value)
    {
    case FormatFeatureFlagBits::eSampledImage: return "SampledImage";
    case FormatFeatureFlagBits::eStorageImage: return "StorageImage";
    case FormatFeatureFlagBits::eStorageImageAtomic: return "StorageImageAtomic";
    case FormatFeatureFlagBits::eUniformTexelBuffer: return "UniformTexelBuffer";
    case FormatFeatureFlagBits::eStorageTexelBuffer: return "StorageTexelBuffer";
    case FormatFeatureFlagBits::eStorageTexelBufferAtomic: return "StorageTexelBufferAtomic";
    case FormatFeatureFlagBits::eVertexBuffer: return "VertexBuffer";
    case FormatFeatureFlagBits::eColorAttachment: return "ColorAttachment";
    case FormatFeatureFlagBits::eColorAttachmentBlend: return "ColorAttachmentBlend";
    case FormatFeatureFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment";
    case FormatFeatureFlagBits::eBlitSrc: return "BlitSrc";
    case FormatFeatureFlagBits::eBlitDst: return "BlitDst";
    case FormatFeatureFlagBits::eSampledImageFilterLinear: return "SampledImageFilterLinear";
    case FormatFeatureFlagBits::eSampledImageFilterCubicIMG: return "SampledImageFilterCubicIMG";
    case FormatFeatureFlagBits::eTransferSrcKHR: return "TransferSrcKHR";
    case FormatFeatureFlagBits::eTransferDstKHR: return "TransferDstKHR";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(FormatFeatureFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & FormatFeatureFlagBits::eSampledImage) result += "SampledImage | ";
    if (value & FormatFeatureFlagBits::eStorageImage) result += "StorageImage | ";
    if (value & FormatFeatureFlagBits::eStorageImageAtomic) result += "StorageImageAtomic | ";
    if (value & FormatFeatureFlagBits::eUniformTexelBuffer) result += "UniformTexelBuffer | ";
    if (value & FormatFeatureFlagBits::eStorageTexelBuffer) result += "StorageTexelBuffer | ";
    if (value & FormatFeatureFlagBits::eStorageTexelBufferAtomic) result += "StorageTexelBufferAtomic | ";
    if (value & FormatFeatureFlagBits::eVertexBuffer) result += "VertexBuffer | ";
    if (value & FormatFeatureFlagBits::eColorAttachment) result += "ColorAttachment | ";
    if (value & FormatFeatureFlagBits::eColorAttachmentBlend) result += "ColorAttachmentBlend | ";
    if (value & FormatFeatureFlagBits::eDepthStencilAttachment) result += "DepthStencilAttachment | ";
    if (value & FormatFeatureFlagBits::eBlitSrc) result += "BlitSrc | ";
    if (value & FormatFeatureFlagBits::eBlitDst) result += "BlitDst | ";
    if (value & FormatFeatureFlagBits::eSampledImageFilterLinear) result += "SampledImageFilterLinear | ";
    if (value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG) result += "SampledImageFilterCubicIMG | ";
    if (value & FormatFeatureFlagBits::eTransferSrcKHR) result += "TransferSrcKHR | ";
    if (value & FormatFeatureFlagBits::eTransferDstKHR) result += "TransferDstKHR | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(QueryControlFlagBits value)
  {
    switch (value)
    {
    case QueryControlFlagBits::ePrecise: return "Precise";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueryControlFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & QueryControlFlagBits::ePrecise) result += "Precise | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(QueryResultFlagBits value)
  {
    switch (value)
    {
    case QueryResultFlagBits::e64: return "64";
    case QueryResultFlagBits::eWait: return "Wait";
    case QueryResultFlagBits::eWithAvailability: return "WithAvailability";
    case QueryResultFlagBits::ePartial: return "Partial";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueryResultFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & QueryResultFlagBits::e64) result += "64 | ";
    if (value & QueryResultFlagBits::eWait) result += "Wait | ";
    if (value & QueryResultFlagBits::eWithAvailability) result += "WithAvailability | ";
    if (value & QueryResultFlagBits::ePartial) result += "Partial | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlagBits value)
  {
    switch (value)
    {
    case CommandBufferUsageFlagBits::eOneTimeSubmit: return "OneTimeSubmit";
    case CommandBufferUsageFlagBits::eRenderPassContinue: return "RenderPassContinue";
    case CommandBufferUsageFlagBits::eSimultaneousUse: return "SimultaneousUse";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferUsageFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CommandBufferUsageFlagBits::eOneTimeSubmit) result += "OneTimeSubmit | ";
    if (value & CommandBufferUsageFlagBits::eRenderPassContinue) result += "RenderPassContinue | ";
    if (value & CommandBufferUsageFlagBits::eSimultaneousUse) result += "SimultaneousUse | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlagBits value)
  {
    switch (value)
    {
    case QueryPipelineStatisticFlagBits::eInputAssemblyVertices: return "InputAssemblyVertices";
    case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives: return "InputAssemblyPrimitives";
    case QueryPipelineStatisticFlagBits::eVertexShaderInvocations: return "VertexShaderInvocations";
    case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations: return "GeometryShaderInvocations";
    case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives: return "GeometryShaderPrimitives";
    case QueryPipelineStatisticFlagBits::eClippingInvocations: return "ClippingInvocations";
    case QueryPipelineStatisticFlagBits::eClippingPrimitives: return "ClippingPrimitives";
    case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations: return "FragmentShaderInvocations";
    case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches: return "TessellationControlShaderPatches";
    case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations: return "TessellationEvaluationShaderInvocations";
    case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(QueryPipelineStatisticFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices) result += "InputAssemblyVertices | ";
    if (value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) result += "InputAssemblyPrimitives | ";
    if (value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations) result += "VertexShaderInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) result += "GeometryShaderInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) result += "GeometryShaderPrimitives | ";
    if (value & QueryPipelineStatisticFlagBits::eClippingInvocations) result += "ClippingInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eClippingPrimitives) result += "ClippingPrimitives | ";
    if (value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) result += "FragmentShaderInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) result += "TessellationControlShaderPatches | ";
    if (value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) result += "TessellationEvaluationShaderInvocations | ";
    if (value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations) result += "ComputeShaderInvocations | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlagBits value)
  {
    switch (value)
    {
    case ImageAspectFlagBits::eColor: return "Color";
    case ImageAspectFlagBits::eDepth: return "Depth";
    case ImageAspectFlagBits::eStencil: return "Stencil";
    case ImageAspectFlagBits::eMetadata: return "Metadata";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ImageAspectFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ImageAspectFlagBits::eColor) result += "Color | ";
    if (value & ImageAspectFlagBits::eDepth) result += "Depth | ";
    if (value & ImageAspectFlagBits::eStencil) result += "Stencil | ";
    if (value & ImageAspectFlagBits::eMetadata) result += "Metadata | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlagBits value)
  {
    switch (value)
    {
    case SparseImageFormatFlagBits::eSingleMiptail: return "SingleMiptail";
    case SparseImageFormatFlagBits::eAlignedMipSize: return "AlignedMipSize";
    case SparseImageFormatFlagBits::eNonstandardBlockSize: return "NonstandardBlockSize";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SparseImageFormatFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SparseImageFormatFlagBits::eSingleMiptail) result += "SingleMiptail | ";
    if (value & SparseImageFormatFlagBits::eAlignedMipSize) result += "AlignedMipSize | ";
    if (value & SparseImageFormatFlagBits::eNonstandardBlockSize) result += "NonstandardBlockSize | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlagBits value)
  {
    switch (value)
    {
    case SparseMemoryBindFlagBits::eMetadata: return "Metadata";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SparseMemoryBindFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SparseMemoryBindFlagBits::eMetadata) result += "Metadata | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlagBits value)
  {
    switch (value)
    {
    case PipelineStageFlagBits::eTopOfPipe: return "TopOfPipe";
    case PipelineStageFlagBits::eDrawIndirect: return "DrawIndirect";
    case PipelineStageFlagBits::eVertexInput: return "VertexInput";
    case PipelineStageFlagBits::eVertexShader: return "VertexShader";
    case PipelineStageFlagBits::eTessellationControlShader: return "TessellationControlShader";
    case PipelineStageFlagBits::eTessellationEvaluationShader: return "TessellationEvaluationShader";
    case PipelineStageFlagBits::eGeometryShader: return "GeometryShader";
    case PipelineStageFlagBits::eFragmentShader: return "FragmentShader";
    case PipelineStageFlagBits::eEarlyFragmentTests: return "EarlyFragmentTests";
    case PipelineStageFlagBits::eLateFragmentTests: return "LateFragmentTests";
    case PipelineStageFlagBits::eColorAttachmentOutput: return "ColorAttachmentOutput";
    case PipelineStageFlagBits::eComputeShader: return "ComputeShader";
    case PipelineStageFlagBits::eTransfer: return "Transfer";
    case PipelineStageFlagBits::eBottomOfPipe: return "BottomOfPipe";
    case PipelineStageFlagBits::eHost: return "Host";
    case PipelineStageFlagBits::eAllGraphics: return "AllGraphics";
    case PipelineStageFlagBits::eAllCommands: return "AllCommands";
    case PipelineStageFlagBits::eCommandProcessNVX: return "CommandProcessNVX";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(PipelineStageFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & PipelineStageFlagBits::eTopOfPipe) result += "TopOfPipe | ";
    if (value & PipelineStageFlagBits::eDrawIndirect) result += "DrawIndirect | ";
    if (value & PipelineStageFlagBits::eVertexInput) result += "VertexInput | ";
    if (value & PipelineStageFlagBits::eVertexShader) result += "VertexShader | ";
    if (value & PipelineStageFlagBits::eTessellationControlShader) result += "TessellationControlShader | ";
    if (value & PipelineStageFlagBits::eTessellationEvaluationShader) result += "TessellationEvaluationShader | ";
    if (value & PipelineStageFlagBits::eGeometryShader) result += "GeometryShader | ";
    if (value & PipelineStageFlagBits::eFragmentShader) result += "FragmentShader | ";
    if (value & PipelineStageFlagBits::eEarlyFragmentTests) result += "EarlyFragmentTests | ";
    if (value & PipelineStageFlagBits::eLateFragmentTests) result += "LateFragmentTests | ";
    if (value & PipelineStageFlagBits::eColorAttachmentOutput) result += "ColorAttachmentOutput | ";
    if (value & PipelineStageFlagBits::eComputeShader) result += "ComputeShader | ";
    if (value & PipelineStageFlagBits::eTransfer) result += "Transfer | ";
    if (value & PipelineStageFlagBits::eBottomOfPipe) result += "BottomOfPipe | ";
    if (value & PipelineStageFlagBits::eHost) result += "Host | ";
    if (value & PipelineStageFlagBits::eAllGraphics) result += "AllGraphics | ";
    if (value & PipelineStageFlagBits::eAllCommands) result += "AllCommands | ";
    if (value & PipelineStageFlagBits::eCommandProcessNVX) result += "CommandProcessNVX | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlagBits value)
  {
    switch (value)
    {
    case CommandPoolCreateFlagBits::eTransient: return "Transient";
    case CommandPoolCreateFlagBits::eResetCommandBuffer: return "ResetCommandBuffer";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CommandPoolCreateFlagBits::eTransient) result += "Transient | ";
    if (value & CommandPoolCreateFlagBits::eResetCommandBuffer) result += "ResetCommandBuffer | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlagBits value)
  {
    switch (value)
    {
    case CommandPoolResetFlagBits::eReleaseResources: return "ReleaseResources";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandPoolResetFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CommandPoolResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlagBits value)
  {
    switch (value)
    {
    case CommandBufferResetFlagBits::eReleaseResources: return "ReleaseResources";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CommandBufferResetFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CommandBufferResetFlagBits::eReleaseResources) result += "ReleaseResources | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(SampleCountFlagBits value)
  {
    switch (value)
    {
    case SampleCountFlagBits::e1: return "1";
    case SampleCountFlagBits::e2: return "2";
    case SampleCountFlagBits::e4: return "4";
    case SampleCountFlagBits::e8: return "8";
    case SampleCountFlagBits::e16: return "16";
    case SampleCountFlagBits::e32: return "32";
    case SampleCountFlagBits::e64: return "64";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SampleCountFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SampleCountFlagBits::e1) result += "1 | ";
    if (value & SampleCountFlagBits::e2) result += "2 | ";
    if (value & SampleCountFlagBits::e4) result += "4 | ";
    if (value & SampleCountFlagBits::e8) result += "8 | ";
    if (value & SampleCountFlagBits::e16) result += "16 | ";
    if (value & SampleCountFlagBits::e32) result += "32 | ";
    if (value & SampleCountFlagBits::e64) result += "64 | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlagBits value)
  {
    switch (value)
    {
    case AttachmentDescriptionFlagBits::eMayAlias: return "MayAlias";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(AttachmentDescriptionFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & AttachmentDescriptionFlagBits::eMayAlias) result += "MayAlias | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlagBits value)
  {
    switch (value)
    {
    case StencilFaceFlagBits::eFront: return "Front";
    case StencilFaceFlagBits::eBack: return "Back";
    case StencilFaceFlagBits::eVkStencilFrontAndBack: return "VkStencilFrontAndBack";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(StencilFaceFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & StencilFaceFlagBits::eFront) result += "Front | ";
    if (value & StencilFaceFlagBits::eBack) result += "Back | ";
    if (value & StencilFaceFlagBits::eVkStencilFrontAndBack) result += "VkStencilFrontAndBack | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlagBits value)
  {
    switch (value)
    {
    case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DescriptorPoolCreateFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet) result += "FreeDescriptorSet | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(DependencyFlagBits value)
  {
    switch (value)
    {
    case DependencyFlagBits::eByRegion: return "ByRegion";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DependencyFlags value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & DependencyFlagBits::eByRegion) result += "ByRegion | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(PresentModeKHR value)
  {
    switch (value)
    {
    case PresentModeKHR::eImmediate: return "Immediate";
    case PresentModeKHR::eMailbox: return "Mailbox";
    case PresentModeKHR::eFifo: return "Fifo";
    case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ColorSpaceKHR value)
  {
    switch (value)
    {
    case ColorSpaceKHR::eSrgbNonlinear: return "SrgbNonlinear";
    case ColorSpaceKHR::eDisplayP3LinearEXT: return "DisplayP3LinearEXT";
    case ColorSpaceKHR::eDisplayP3NonlinearEXT: return "DisplayP3NonlinearEXT";
    case ColorSpaceKHR::eScrgbLinearEXT: return "ScrgbLinearEXT";
    case ColorSpaceKHR::eScrgbNonlinearEXT: return "ScrgbNonlinearEXT";
    case ColorSpaceKHR::eDciP3LinearEXT: return "DciP3LinearEXT";
    case ColorSpaceKHR::eDciP3NonlinearEXT: return "DciP3NonlinearEXT";
    case ColorSpaceKHR::eBt709LinearEXT: return "Bt709LinearEXT";
    case ColorSpaceKHR::eBt709NonlinearEXT: return "Bt709NonlinearEXT";
    case ColorSpaceKHR::eBt2020LinearEXT: return "Bt2020LinearEXT";
    case ColorSpaceKHR::eBt2020NonlinearEXT: return "Bt2020NonlinearEXT";
    case ColorSpaceKHR::eAdobergbLinearEXT: return "AdobergbLinearEXT";
    case ColorSpaceKHR::eAdobergbNonlinearEXT: return "AdobergbNonlinearEXT";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagBitsKHR value)
  {
    switch (value)
    {
    case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
    case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
    case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
    case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DisplayPlaneAlphaFlagsKHR value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & DisplayPlaneAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
    if (value & DisplayPlaneAlphaFlagBitsKHR::eGlobal) result += "Global | ";
    if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel) result += "PerPixel | ";
    if (value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied) result += "PerPixelPremultiplied | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagBitsKHR value)
  {
    switch (value)
    {
    case CompositeAlphaFlagBitsKHR::eOpaque: return "Opaque";
    case CompositeAlphaFlagBitsKHR::ePreMultiplied: return "PreMultiplied";
    case CompositeAlphaFlagBitsKHR::ePostMultiplied: return "PostMultiplied";
    case CompositeAlphaFlagBitsKHR::eInherit: return "Inherit";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(CompositeAlphaFlagsKHR value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & CompositeAlphaFlagBitsKHR::eOpaque) result += "Opaque | ";
    if (value & CompositeAlphaFlagBitsKHR::ePreMultiplied) result += "PreMultiplied | ";
    if (value & CompositeAlphaFlagBitsKHR::ePostMultiplied) result += "PostMultiplied | ";
    if (value & CompositeAlphaFlagBitsKHR::eInherit) result += "Inherit | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagBitsKHR value)
  {
    switch (value)
    {
    case SurfaceTransformFlagBitsKHR::eIdentity: return "Identity";
    case SurfaceTransformFlagBitsKHR::eRotate90: return "Rotate90";
    case SurfaceTransformFlagBitsKHR::eRotate180: return "Rotate180";
    case SurfaceTransformFlagBitsKHR::eRotate270: return "Rotate270";
    case SurfaceTransformFlagBitsKHR::eHorizontalMirror: return "HorizontalMirror";
    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90: return "HorizontalMirrorRotate90";
    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180: return "HorizontalMirrorRotate180";
    case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270: return "HorizontalMirrorRotate270";
    case SurfaceTransformFlagBitsKHR::eInherit: return "Inherit";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SurfaceTransformFlagsKHR value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SurfaceTransformFlagBitsKHR::eIdentity) result += "Identity | ";
    if (value & SurfaceTransformFlagBitsKHR::eRotate90) result += "Rotate90 | ";
    if (value & SurfaceTransformFlagBitsKHR::eRotate180) result += "Rotate180 | ";
    if (value & SurfaceTransformFlagBitsKHR::eRotate270) result += "Rotate270 | ";
    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirror) result += "HorizontalMirror | ";
    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) result += "HorizontalMirrorRotate90 | ";
    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) result += "HorizontalMirrorRotate180 | ";
    if (value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) result += "HorizontalMirrorRotate270 | ";
    if (value & SurfaceTransformFlagBitsKHR::eInherit) result += "Inherit | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagBitsEXT value)
  {
    switch (value)
    {
    case DebugReportFlagBitsEXT::eInformation: return "Information";
    case DebugReportFlagBitsEXT::eWarning: return "Warning";
    case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
    case DebugReportFlagBitsEXT::eError: return "Error";
    case DebugReportFlagBitsEXT::eDebug: return "Debug";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DebugReportFlagsEXT value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & DebugReportFlagBitsEXT::eInformation) result += "Information | ";
    if (value & DebugReportFlagBitsEXT::eWarning) result += "Warning | ";
    if (value & DebugReportFlagBitsEXT::ePerformanceWarning) result += "PerformanceWarning | ";
    if (value & DebugReportFlagBitsEXT::eError) result += "Error | ";
    if (value & DebugReportFlagBitsEXT::eDebug) result += "Debug | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }


  VULKAN_HPP_INLINE std::string to_string(DebugReportObjectTypeEXT value)
  {
    switch (value)
    {
    case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
    case DebugReportObjectTypeEXT::eInstance: return "Instance";
    case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
    case DebugReportObjectTypeEXT::eDevice: return "Device";
    case DebugReportObjectTypeEXT::eQueue: return "Queue";
    case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
    case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
    case DebugReportObjectTypeEXT::eFence: return "Fence";
    case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
    case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
    case DebugReportObjectTypeEXT::eImage: return "Image";
    case DebugReportObjectTypeEXT::eEvent: return "Event";
    case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
    case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
    case DebugReportObjectTypeEXT::eImageView: return "ImageView";
    case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
    case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
    case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
    case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
    case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
    case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
    case DebugReportObjectTypeEXT::eSampler: return "Sampler";
    case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
    case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
    case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
    case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
    case DebugReportObjectTypeEXT::eSurfaceKhr: return "SurfaceKhr";
    case DebugReportObjectTypeEXT::eSwapchainKhr: return "SwapchainKhr";
    case DebugReportObjectTypeEXT::eDebugReport: return "DebugReport";
    case DebugReportObjectTypeEXT::eDisplayKhr: return "DisplayKhr";
    case DebugReportObjectTypeEXT::eDisplayModeKhr: return "DisplayModeKhr";
    case DebugReportObjectTypeEXT::eObjectTableNvx: return "ObjectTableNvx";
    case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNvx: return "IndirectCommandsLayoutNvx";
    default: return "invalid";
    }
  }
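
  // Illustrative usage only: a minimal sketch of a VK_EXT_debug_report callback
  // that formats the incoming C object-type enum with the overload above. The
  // callback name and the printf-based output are hypothetical and assume
  // <cstdio>; only the static_cast and the vk::to_string call come from this header.
  //
  //   VKAPI_ATTR VkBool32 VKAPI_CALL myDebugReportCallback(
  //       VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType,
  //       uint64_t object, size_t location, int32_t messageCode,
  //       const char* pLayerPrefix, const char* pMessage, void* pUserData)
  //   {
  //     std::string type = vk::to_string(static_cast<vk::DebugReportObjectTypeEXT>(objectType));
  //     printf("[%s] %s object %llu: %s\n", pLayerPrefix, type.c_str(),
  //            static_cast<unsigned long long>(object), pMessage);
  //     return VK_FALSE;
  //   }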

  VULKAN_HPP_INLINE std::string to_string(DebugReportErrorEXT value)
  {
    switch (value)
    {
    case DebugReportErrorEXT::eNone: return "None";
    case DebugReportErrorEXT::eCallbackRef: return "CallbackRef";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(RasterizationOrderAMD value)
  {
    switch (value)
    {
    case RasterizationOrderAMD::eStrict: return "Strict";
    case RasterizationOrderAMD::eRelaxed: return "Relaxed";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagBitsNV value)
  {
    switch (value)
    {
    case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
    case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
    case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
    case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryHandleTypeFlagsNV value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) result += "OpaqueWin32 | ";
    if (value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) result += "OpaqueWin32Kmt | ";
    if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) result += "D3D11Image | ";
    if (value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt) result += "D3D11ImageKmt | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagBitsNV value)
  {
    switch (value)
    {
    case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
    case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
    case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ExternalMemoryFeatureFlagsNV value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) result += "DedicatedOnly | ";
    if (value & ExternalMemoryFeatureFlagBitsNV::eExportable) result += "Exportable | ";
    if (value & ExternalMemoryFeatureFlagBitsNV::eImportable) result += "Importable | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ValidationCheckEXT value)
  {
    switch (value)
    {
    case ValidationCheckEXT::eAll: return "All";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagBitsNVX value)
  {
    switch (value)
    {
    case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences: return "UnorderedSequences";
    case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences: return "SparseSequences";
    case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions: return "EmptyExecutions";
    case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences: return "IndexedSequences";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsLayoutUsageFlagsNVX value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) result += "UnorderedSequences | ";
    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) result += "SparseSequences | ";
    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) result += "EmptyExecutions | ";
    if (value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) result += "IndexedSequences | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagBitsNVX value)
  {
    switch (value)
    {
    case ObjectEntryUsageFlagBitsNVX::eGraphics: return "Graphics";
    case ObjectEntryUsageFlagBitsNVX::eCompute: return "Compute";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ObjectEntryUsageFlagsNVX value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & ObjectEntryUsageFlagBitsNVX::eGraphics) result += "Graphics | ";
    if (value & ObjectEntryUsageFlagBitsNVX::eCompute) result += "Compute | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(IndirectCommandsTokenTypeNVX value)
  {
    switch (value)
    {
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPipeline: return "VkIndirectCommandsTokenPipeline";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDescriptorSet: return "VkIndirectCommandsTokenDescriptorSet";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenIndexBuffer: return "VkIndirectCommandsTokenIndexBuffer";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenVertexBuffer: return "VkIndirectCommandsTokenVertexBuffer";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenPushConstant: return "VkIndirectCommandsTokenPushConstant";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDrawIndexed: return "VkIndirectCommandsTokenDrawIndexed";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDraw: return "VkIndirectCommandsTokenDraw";
    case IndirectCommandsTokenTypeNVX::eVkIndirectCommandsTokenDispatch: return "VkIndirectCommandsTokenDispatch";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(ObjectEntryTypeNVX value)
  {
    switch (value)
    {
    case ObjectEntryTypeNVX::eVkObjectEntryDescriptorSet: return "VkObjectEntryDescriptorSet";
    case ObjectEntryTypeNVX::eVkObjectEntryPipeline: return "VkObjectEntryPipeline";
    case ObjectEntryTypeNVX::eVkObjectEntryIndexBuffer: return "VkObjectEntryIndexBuffer";
    case ObjectEntryTypeNVX::eVkObjectEntryVertexBuffer: return "VkObjectEntryVertexBuffer";
    case ObjectEntryTypeNVX::eVkObjectEntryPushConstant: return "VkObjectEntryPushConstant";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagBitsEXT value)
  {
    switch (value)
    {
    case SurfaceCounterFlagBitsEXT::eVblankExt: return "VblankExt";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(SurfaceCounterFlagsEXT value)
  {
    if (!value) return "{}";
    std::string result;
    if (value & SurfaceCounterFlagBitsEXT::eVblankExt) result += "VblankExt | ";
    return "{" + result.substr(0, result.size() - 3) + "}";
  }

  VULKAN_HPP_INLINE std::string to_string(DisplayPowerStateEXT value)
  {
    switch (value)
    {
    case DisplayPowerStateEXT::eOff: return "Off";
    case DisplayPowerStateEXT::eSuspend: return "Suspend";
    case DisplayPowerStateEXT::eOn: return "On";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DeviceEventTypeEXT value)
  {
    switch (value)
    {
    case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
    default: return "invalid";
    }
  }

  VULKAN_HPP_INLINE std::string to_string(DisplayEventTypeEXT value)
  {
    switch (value)
    {
    case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
    default: return "invalid";
    }
  }

} // namespace vk

#endif