Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright © 2016 Red Hat. |
| 3 | * Copyright © 2016 Bas Nieuwenhuizen |
| 4 | * |
| 5 | * based in part on anv driver which is: |
| 6 | * Copyright © 2015 Intel Corporation |
| 7 | * |
| 8 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 9 | * copy of this software and associated documentation files (the "Software"), |
| 10 | * to deal in the Software without restriction, including without limitation |
| 11 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 12 | * and/or sell copies of the Software, and to permit persons to whom the |
| 13 | * Software is furnished to do so, subject to the following conditions: |
| 14 | * |
| 15 | * The above copyright notice and this permission notice (including the next |
| 16 | * paragraph) shall be included in all copies or substantial portions of the |
| 17 | * Software. |
| 18 | * |
| 19 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 20 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 21 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 22 | * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 23 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
| 24 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 25 | * IN THE SOFTWARE. |
| 26 | */ |
| 27 | |
Edward O'Callaghan | ba43768 | 2016-10-07 22:19:19 +1100 | [diff] [blame] | 28 | #ifndef RADV_PRIVATE_H |
| 29 | #define RADV_PRIVATE_H |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 30 | |
| 31 | #include <stdlib.h> |
| 32 | #include <stdio.h> |
| 33 | #include <stdbool.h> |
| 34 | #include <pthread.h> |
| 35 | #include <assert.h> |
| 36 | #include <stdint.h> |
| 37 | #include <string.h> |
| 38 | #ifdef HAVE_VALGRIND |
| 39 | #include <valgrind.h> |
| 40 | #include <memcheck.h> |
| 41 | #define VG(x) x |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 42 | #else |
| 43 | #define VG(x) |
| 44 | #endif |
| 45 | |
| 46 | #include <amdgpu.h> |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 47 | #include "compiler/shader_enums.h" |
| 48 | #include "util/macros.h" |
| 49 | #include "util/list.h" |
Dave Airlie | 4450f40 | 2016-10-14 13:36:45 +1000 | [diff] [blame] | 50 | #include "util/vk_alloc.h" |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 51 | #include "main/macros.h" |
Dave Airlie | 4450f40 | 2016-10-14 13:36:45 +1000 | [diff] [blame] | 52 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 53 | #include "radv_radeon_winsys.h" |
| 54 | #include "ac_binary.h" |
| 55 | #include "ac_nir_to_llvm.h" |
Bas Nieuwenhuizen | fb7e4e1 | 2017-03-05 20:58:31 +0100 | [diff] [blame] | 56 | #include "radv_debug.h" |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 57 | #include "radv_descriptor_set.h" |
| 58 | |
| 59 | #include <llvm-c/TargetMachine.h> |
| 60 | |
| 61 | /* Pre-declarations needed for WSI entrypoints */ |
| 62 | struct wl_surface; |
| 63 | struct wl_display; |
| 64 | typedef struct xcb_connection_t xcb_connection_t; |
| 65 | typedef uint32_t xcb_visualid_t; |
| 66 | typedef uint32_t xcb_window_t; |
| 67 | |
| 68 | #include <vulkan/vulkan.h> |
| 69 | #include <vulkan/vulkan_intel.h> |
| 70 | #include <vulkan/vk_icd.h> |
| 71 | |
| 72 | #include "radv_entrypoints.h" |
| 73 | |
Dave Airlie | 6c3bd1c | 2016-10-14 07:49:34 +0100 | [diff] [blame] | 74 | #include "wsi_common.h" |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 75 | |
| 76 | #define MAX_VBS 32 |
| 77 | #define MAX_VERTEX_ATTRIBS 32 |
| 78 | #define MAX_RTS 8 |
| 79 | #define MAX_VIEWPORTS 16 |
| 80 | #define MAX_SCISSORS 16 |
| 81 | #define MAX_PUSH_CONSTANTS_SIZE 128 |
Fredrik Höglund | c6487bc | 2017-03-29 18:12:44 +0200 | [diff] [blame] | 82 | #define MAX_PUSH_DESCRIPTORS 32 |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 83 | #define MAX_DYNAMIC_BUFFERS 16 |
Grazvydas Ignotas | cb89d19 | 2017-01-11 03:31:24 +0200 | [diff] [blame] | 84 | #define MAX_SAMPLES_LOG2 4 |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 85 | #define NUM_META_FS_KEYS 11 |
Bas Nieuwenhuizen | 5ae4de1 | 2017-01-16 21:25:10 +0100 | [diff] [blame] | 86 | #define RADV_MAX_DRM_DEVICES 8 |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 87 | |
| 88 | #define NUM_DEPTH_CLEAR_PIPELINES 3 |
| 89 | |
/* Memory heaps advertised to the application (VkMemoryHeap indices).
 * Order is significant: radv_mem_type values map onto these heaps. */
enum radv_mem_heap {
	RADV_MEM_HEAP_VRAM,
	RADV_MEM_HEAP_VRAM_CPU_ACCESS, /* CPU-visible portion of VRAM */
	RADV_MEM_HEAP_GTT,             /* system memory accessible to the GPU */
	RADV_MEM_HEAP_COUNT
};
| 96 | |
/* Memory types advertised to the application (VkMemoryType indices). */
enum radv_mem_type {
	RADV_MEM_TYPE_VRAM,
	RADV_MEM_TYPE_GTT_WRITE_COMBINE, /* host-visible, write-combined */
	RADV_MEM_TYPE_VRAM_CPU_ACCESS,   /* host-visible VRAM */
	RADV_MEM_TYPE_GTT_CACHED,        /* host-visible, CPU-cached */
	RADV_MEM_TYPE_COUNT
};
| 104 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 105 | #define radv_printflike(a, b) __attribute__((__format__(__printf__, a, b))) |
| 106 | |
/* Round v up to the next multiple of a.  a must be a non-zero power of two. */
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
	assert(a != 0 && (a & (a - 1)) == 0);

	const uint32_t mask = a - 1;
	return (v + mask) & ~mask;
}
| 113 | |
/* Round v up to the next multiple of a; unlike align_u32(), a need not be a
 * power of two.  a must still be non-zero (division below). */
static inline uint32_t
align_u32_npot(uint32_t v, uint32_t a)
{
	/* Match the sibling align helpers: catch a == 0 in debug builds
	 * instead of hitting undefined division by zero. */
	assert(a != 0);
	return (v + a - 1) / a * a;
}
| 119 | |
/* 64-bit variant of align_u32(): round v up to the next multiple of the
 * non-zero power-of-two alignment a. */
static inline uint64_t
align_u64(uint64_t v, uint64_t a)
{
	assert(a != 0 && (a & (a - 1)) == 0);

	const uint64_t mask = a - 1;
	return (v + mask) & ~mask;
}
| 126 | |
/* Signed variant of align_u32(): round v up (toward +infinity) to the next
 * multiple of the non-zero power-of-two alignment a. */
static inline int32_t
align_i32(int32_t v, int32_t a)
{
	assert(a != 0 && (a & (a - 1)) == 0);

	const int32_t mask = a - 1;
	return (v + mask) & ~mask;
}
| 133 | |
/** Return true when n is a multiple of a.  Alignment must be a power of 2. */
static inline bool
radv_is_aligned(uintmax_t n, uintmax_t a)
{
	assert((a & (a - 1)) == 0);

	const uintmax_t mask = a - 1;
	return (n & mask) == 0;
}
| 141 | |
/* Integer ceiling division: number of a-sized chunks needed to hold v.
 * Note: v + a - 1 intentionally computed in uint32_t, wrapping like the
 * original formulation for values near UINT32_MAX. */
static inline uint32_t
round_up_u32(uint32_t v, uint32_t a)
{
	const uint32_t biased = v + a - 1;
	return biased / a;
}
| 147 | |
/* 64-bit integer ceiling division: chunks of size a needed to hold v.
 * The v + a - 1 sum deliberately matches the original's wrap behavior. */
static inline uint64_t
round_up_u64(uint64_t v, uint64_t a)
{
	const uint64_t biased = v + a - 1;
	return biased / a;
}
| 153 | |
/* Dimension of mip level `levels` below a base dimension of n: halve n
 * `levels` times, clamping at 1 texel.  A zero base dimension stays zero. */
static inline uint32_t
radv_minify(uint32_t n, uint32_t levels)
{
	if (n == 0)
		return 0;

	uint32_t size = n >> levels;
	return size > 0 ? size : 1;
}
/* Clamp f to [min, max].  min must be strictly less than max.
 * NaN inputs fail both comparisons and are returned unchanged. */
static inline float
radv_clamp_f(float f, float min, float max)
{
	assert(min < max);

	if (f < min)
		return min;
	if (f > max)
		return max;
	return f;
}
| 174 | |
/* Clear the bits of clear_mask from *inout_mask.  Returns true when at
 * least one of those bits was actually set. */
static inline bool
radv_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
	const bool any_set = (*inout_mask & clear_mask) != 0;

	if (any_set)
		*inout_mask &= ~clear_mask;
	return any_set;
}
| 185 | |
/* Iterate b over the indices of the set bits of dword, lowest bit first.
 * b is assigned inside the loop condition; dword is evaluated once. */
#define for_each_bit(b, dword)                                \
	for (uint32_t __dword = (dword);                          \
	     (b) = __builtin_ffs(__dword) - 1, __dword;           \
	     __dword &= ~(1 << (b)))

/* memcpy `count` elements from src to dest, statically checking that the
 * element sizes match.  NOTE: src and dest are each evaluated twice. */
#define typed_memcpy(dest, src, count) ({                     \
			STATIC_ASSERT(sizeof(*src) == sizeof(*dest)); \
			memcpy((dest), (src), (count) * sizeof(*(src))); \
		})

/* Zero the object x itself (not a pointed-to object). */
#define zero(x) (memset(&(x), 0, sizeof(x)))
| 197 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 198 | /* Whenever we generate an error, pass it through this function. Useful for |
| 199 | * debugging, where we can break on it. Only call at error site, not when |
| 200 | * propagating errors. Might be useful to plug in a stack trace here. |
| 201 | */ |
| 202 | |
| 203 | VkResult __vk_errorf(VkResult error, const char *file, int line, const char *format, ...); |
| 204 | |
| 205 | #ifdef DEBUG |
| 206 | #define vk_error(error) __vk_errorf(error, __FILE__, __LINE__, NULL); |
| 207 | #define vk_errorf(error, format, ...) __vk_errorf(error, __FILE__, __LINE__, format, ## __VA_ARGS__); |
| 208 | #else |
| 209 | #define vk_error(error) error |
| 210 | #define vk_errorf(error, format, ...) error |
| 211 | #endif |
| 212 | |
| 213 | void __radv_finishme(const char *file, int line, const char *format, ...) |
| 214 | radv_printflike(3, 4); |
| 215 | void radv_loge(const char *format, ...) radv_printflike(1, 2); |
| 216 | void radv_loge_v(const char *format, va_list va); |
| 217 | |
/**
 * Print a FINISHME message, including its source location.
 *
 * The static guard makes each call site report only once per process,
 * preventing log spam from hot paths.
 */
#define radv_finishme(format, ...) \
	do { \
		static bool reported = false; \
		if (!reported) { \
			__radv_finishme(__FILE__, __LINE__, format, ##__VA_ARGS__); \
			reported = true; \
		} \
	} while (0)
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 229 | |
/* A non-fatal assert. Useful for debugging: logs the failed condition with
 * its location but continues execution.  Compiles to nothing without DEBUG. */
#ifdef DEBUG
#define radv_assert(x) ({						\
			if (unlikely(!(x)))				\
				fprintf(stderr, "%s:%d ASSERT: %s\n", __FILE__, __LINE__, #x); \
		})
#else
#define radv_assert(x)
#endif
| 239 | |
/* Mark an unimplemented entrypoint that must return a value: logs a
 * once-per-site FINISHME and returns v. */
#define stub_return(v)					\
	do {						\
		radv_finishme("stub %s", __func__);	\
		return (v);				\
	} while (0)

/* Same as stub_return() for void entrypoints. */
#define stub()						\
	do {						\
		radv_finishme("stub %s", __func__);	\
		return;					\
	} while (0)
| 251 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 252 | void *radv_lookup_entrypoint(const char *name); |
| 253 | |
/* Extension list advertised by a physical device. */
struct radv_extensions {
	VkExtensionProperties *ext_array; /* array of num_ext entries */
	uint32_t num_ext;
};
| 258 | |
/* State for one enumerated GPU; owned by the instance. */
struct radv_physical_device {
	VK_LOADER_DATA _loader_data;

	struct radv_instance *                      instance;

	struct radeon_winsys *ws;
	struct radeon_info rad_info; /* chip info reported by the winsys */
	char                                        path[20]; /* presumably the DRM device node path — verify against enumeration code */
	const char *                                name;
	uint8_t                                     uuid[VK_UUID_SIZE];

	int local_fd; /* fd of the opened DRM device */
	struct wsi_device                       wsi_device;
	struct radv_extensions                      extensions;
};
| 274 | |
/* VkInstance implementation: allocator, enumerated GPUs, debug options. */
struct radv_instance {
	VK_LOADER_DATA                              _loader_data;

	VkAllocationCallbacks                       alloc;

	uint32_t                                    apiVersion; /* API version requested by the app */
	int                                         physicalDeviceCount; /* valid entries in physicalDevices */
	struct radv_physical_device                 physicalDevices[RADV_MAX_DRM_DEVICES];

	uint64_t debug_flags; /* RADV_DEBUG_* bits (see radv_debug.h) */
};
| 286 | |
| 287 | VkResult radv_init_wsi(struct radv_physical_device *physical_device); |
| 288 | void radv_finish_wsi(struct radv_physical_device *physical_device); |
| 289 | |
| 290 | struct cache_entry; |
| 291 | |
/* VkPipelineCache implementation: a hash table of compiled shader
 * variants keyed by SHA-1 (see the *_from_pipeline_cache helpers). */
struct radv_pipeline_cache {
	struct radv_device *                          device;
	pthread_mutex_t                              mutex; /* guards the table state below */

	uint32_t                                     total_size; /* NOTE(review): presumably total bytes of cached entries — verify */
	uint32_t                                     table_size;   /* allocated hash-table slots */
	uint32_t                                     kernel_count; /* occupied slots */
	struct cache_entry **                        hash_table;
	bool                                         modified; /* set when entries were added since load */

	VkAllocationCallbacks                        alloc;
};
| 304 | |
| 305 | void |
| 306 | radv_pipeline_cache_init(struct radv_pipeline_cache *cache, |
| 307 | struct radv_device *device); |
| 308 | void |
| 309 | radv_pipeline_cache_finish(struct radv_pipeline_cache *cache); |
| 310 | void |
| 311 | radv_pipeline_cache_load(struct radv_pipeline_cache *cache, |
| 312 | const void *data, size_t size); |
| 313 | |
| 314 | struct radv_shader_variant * |
| 315 | radv_create_shader_variant_from_pipeline_cache(struct radv_device *device, |
| 316 | struct radv_pipeline_cache *cache, |
| 317 | const unsigned char *sha1); |
| 318 | |
| 319 | struct radv_shader_variant * |
Dave Airlie | 10c2b58 | 2017-03-20 13:24:02 +1000 | [diff] [blame] | 320 | radv_pipeline_cache_insert_shader(struct radv_pipeline_cache *cache, |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 321 | const unsigned char *sha1, |
| 322 | struct radv_shader_variant *variant, |
| 323 | const void *code, unsigned code_size); |
| 324 | |
| 325 | void radv_shader_variant_destroy(struct radv_device *device, |
| 326 | struct radv_shader_variant *variant); |
| 327 | |
/* Pipelines, render passes and layouts used internally to implement meta
 * operations (clears, blits, copies, resolves, decompress passes).
 * Created lazily/at device init and cached for the device's lifetime. */
struct radv_meta_state {
	VkAllocationCallbacks alloc;

	/* Private cache used when building the meta pipelines themselves. */
	struct radv_pipeline_cache cache;

	/**
	 * Use array element `i` for images with `2^i` samples.
	 */
	struct {
		VkRenderPass render_pass[NUM_META_FS_KEYS];
		struct radv_pipeline *color_pipelines[NUM_META_FS_KEYS];

		VkRenderPass depthstencil_rp;
		struct radv_pipeline *depth_only_pipeline[NUM_DEPTH_CLEAR_PIPELINES];
		struct radv_pipeline *stencil_only_pipeline[NUM_DEPTH_CLEAR_PIPELINES];
		struct radv_pipeline *depthstencil_pipeline[NUM_DEPTH_CLEAR_PIPELINES];
	} clear[1 + MAX_SAMPLES_LOG2];

	/* vkCmdBlitImage implementation, one pipeline per source dimensionality. */
	struct {
		VkRenderPass render_pass[NUM_META_FS_KEYS];

		/** Pipeline that blits from a 1D image. */
		VkPipeline pipeline_1d_src[NUM_META_FS_KEYS];

		/** Pipeline that blits from a 2D image. */
		VkPipeline pipeline_2d_src[NUM_META_FS_KEYS];

		/** Pipeline that blits from a 3D image. */
		VkPipeline pipeline_3d_src[NUM_META_FS_KEYS];

		VkRenderPass depth_only_rp;
		VkPipeline depth_only_1d_pipeline;
		VkPipeline depth_only_2d_pipeline;
		VkPipeline depth_only_3d_pipeline;

		VkRenderPass stencil_only_rp;
		VkPipeline stencil_only_1d_pipeline;
		VkPipeline stencil_only_2d_pipeline;
		VkPipeline stencil_only_3d_pipeline;
		VkPipelineLayout                          pipeline_layout;
		VkDescriptorSetLayout                     ds_layout;
	} blit;

	struct {
		VkRenderPass render_passes[NUM_META_FS_KEYS];

		VkPipelineLayout p_layouts[2];
		VkDescriptorSetLayout ds_layouts[2];
		VkPipeline pipelines[2][NUM_META_FS_KEYS];

		VkRenderPass depth_only_rp;
		VkPipeline depth_only_pipeline[2];

		VkRenderPass stencil_only_rp;
		VkPipeline stencil_only_pipeline[2];
	} blit2d;

	/* image-to-buffer copy (name suggests; verify against radv_meta sources) */
	struct {
		VkPipelineLayout                          img_p_layout;
		VkDescriptorSetLayout                     img_ds_layout;
		VkPipeline pipeline;
	} itob;
	/* buffer-to-image copy */
	struct {
		VkRenderPass render_pass;
		VkPipelineLayout                          img_p_layout;
		VkDescriptorSetLayout                     img_ds_layout;
		VkPipeline pipeline;
	} btoi;
	/* image-to-image copy */
	struct {
		VkPipelineLayout                          img_p_layout;
		VkDescriptorSetLayout                     img_ds_layout;
		VkPipeline pipeline;
	} itoi;
	/* image clear */
	struct {
		VkPipelineLayout                          img_p_layout;
		VkDescriptorSetLayout                     img_ds_layout;
		VkPipeline pipeline;
	} cleari;

	struct {
		VkPipeline                                pipeline;
		VkRenderPass                              pass;
	} resolve;

	/* Compute-based MSAA resolve; rc[i] handles 2^(i+1) samples. */
	struct {
		VkDescriptorSetLayout                     ds_layout;
		VkPipelineLayout                          p_layout;
		struct {
			VkPipeline                                pipeline;
			VkPipeline                                i_pipeline; /* presumably the integer-format variant — verify */
		} rc[MAX_SAMPLES_LOG2];
	} resolve_compute;

	struct {
		VkPipeline                                decompress_pipeline;
		VkPipeline                                resummarize_pipeline;
		VkRenderPass                              pass;
	} depth_decomp;

	struct {
		VkPipeline                                cmask_eliminate_pipeline;
		VkPipeline                                fmask_decompress_pipeline;
		VkRenderPass                              pass;
	} fast_clear_flush;

	/* vkCmdFillBuffer / vkCmdCopyBuffer compute paths. */
	struct {
		VkPipelineLayout                          fill_p_layout;
		VkPipelineLayout                          copy_p_layout;
		VkDescriptorSetLayout                     fill_ds_layout;
		VkDescriptorSetLayout                     copy_ds_layout;
		VkPipeline                                fill_pipeline;
		VkPipeline                                copy_pipeline;
	} buffer;

	struct {
		VkDescriptorSetLayout                     occlusion_query_ds_layout;
		VkPipelineLayout                          occlusion_query_p_layout;
		VkPipeline                                occlusion_query_pipeline;
	} query;
};
| 448 | |
/* queue types (queue family indices) */
#define RADV_QUEUE_GENERAL 0   /* graphics + compute + transfer */
#define RADV_QUEUE_COMPUTE 1   /* async compute */
#define RADV_QUEUE_TRANSFER 2  /* DMA */

#define RADV_MAX_QUEUE_FAMILIES 3

/* Map one of the RADV_QUEUE_* family indices to its winsys ring type. */
enum ring_type radv_queue_family_to_ring(int f);
| 457 | |
/* VkQueue implementation.  Also owns the per-queue scratch and shader-ring
 * buffers, grown on demand to the largest size requested so far. */
struct radv_queue {
	VK_LOADER_DATA                              _loader_data;
	struct radv_device *                         device;
	struct radeon_winsys_ctx                    *hw_ctx;
	int queue_family_index; /* one of the RADV_QUEUE_* values */
	int queue_idx;          /* index within that family */

	/* Current sizes of the lazily-allocated resources below. */
	uint32_t scratch_size;
	uint32_t compute_scratch_size;
	uint32_t esgs_ring_size;
	uint32_t gsvs_ring_size;
	bool has_tess_rings;
	bool has_sample_positions;

	struct radeon_winsys_bo *scratch_bo;
	struct radeon_winsys_bo *descriptor_bo;
	struct radeon_winsys_bo *compute_scratch_bo;
	struct radeon_winsys_bo *esgs_ring_bo;
	struct radeon_winsys_bo *gsvs_ring_bo;
	struct radeon_winsys_bo *tess_factor_ring_bo;
	struct radeon_winsys_bo *tess_offchip_ring_bo;
	struct radeon_winsys_cs *initial_preamble_cs;  /* presumably prepended to first submission — verify in queue submit path */
	struct radeon_winsys_cs *continue_preamble_cs;
};
| 482 | |
/* VkDevice implementation: owns the queues, meta state, and device-wide
 * hardware configuration derived from the physical device. */
struct radv_device {
	VK_LOADER_DATA                              _loader_data;

	VkAllocationCallbacks                       alloc;

	struct radv_instance *                       instance;
	struct radeon_winsys *ws;

	struct radv_meta_state                       meta_state;

	/* Queues indexed by [family][index within family]. */
	struct radv_queue *queues[RADV_MAX_QUEUE_FAMILIES];
	int queue_count[RADV_MAX_QUEUE_FAMILIES];
	struct radeon_winsys_cs *empty_cs[RADV_MAX_QUEUE_FAMILIES];
	struct radeon_winsys_cs *flush_cs[RADV_MAX_QUEUE_FAMILIES];

	uint64_t debug_flags; /* RADV_DEBUG_* bits, copied from the instance */

	bool llvm_supports_spill; /* whether the LLVM backend can spill scratch */
	bool has_distributed_tess;
	uint32_t tess_offchip_block_dw_size;
	uint32_t scratch_waves;

	uint32_t gs_table_depth;

	/* MSAA sample locations.
	 * The first index is the sample index.
	 * The second index is the coordinate: X, Y. */
	float sample_locations_1x[1][2];
	float sample_locations_2x[2][2];
	float sample_locations_4x[4][2];
	float sample_locations_8x[8][2];
	float sample_locations_16x[16][2];

	/* CIK and later */
	uint32_t gfx_init_size_dw;
	struct radeon_winsys_bo                      *gfx_init; /* GFX ring initialization commands */

	/* Debug trace buffer; trace_id_ptr points into its mapping. */
	struct radeon_winsys_bo                      *trace_bo;
	uint32_t                                     *trace_id_ptr;

	struct radv_physical_device                  *physical_device;

	/* Backup in-memory cache to be used if the app doesn't provide one */
	struct radv_pipeline_cache *                mem_cache;
};
| 528 | |
/* VkDeviceMemory implementation: one winsys buffer object. */
struct radv_device_memory {
	struct radeon_winsys_bo                      *bo;
	/* for dedicated allocations */
	struct radv_image                            *image;
	struct radv_buffer                           *buffer;
	uint32_t                                     type_index; /* index into the advertised memory types */
	VkDeviceSize                                 map_size;
	void *                                       map; /* CPU pointer while mapped — TODO confirm NULL when unmapped */
};
| 538 | |
| 539 | |
/* GPU address range of one dynamic buffer descriptor. */
struct radv_descriptor_range {
	uint64_t va;
	uint32_t size;
};
| 544 | |
/* VkDescriptorSet implementation: descriptor words live in a mapped BO. */
struct radv_descriptor_set {
	const struct radv_descriptor_set_layout *layout;
	uint32_t size; /* bytes of descriptor storage */

	struct radeon_winsys_bo *bo; /* backing storage (pool's BO) */
	uint64_t va;                 /* GPU address of this set's descriptors */
	uint32_t *mapped_ptr;        /* CPU pointer to the same storage */
	struct radv_descriptor_range *dynamic_descriptors;

	struct list_head vram_list; /* link in the owning pool's vram_list */

	/* Trailing array of BOs referenced by the written descriptors. */
	struct radeon_winsys_bo *descriptors[0];
};
| 558 | |
/* Backing storage for VK_KHR_push_descriptor sets recorded into a command
 * buffer; `capacity` is the currently-allocated size for `set`'s data. */
struct radv_push_descriptor_set
{
	struct radv_descriptor_set set;
	uint32_t capacity;
};
| 564 | |
/* VkDescriptorPool implementation: bump-allocates set storage out of one BO. */
struct radv_descriptor_pool {
	struct radeon_winsys_bo *bo;
	uint8_t *mapped_ptr;     /* CPU mapping of bo */
	uint64_t current_offset; /* allocation cursor into bo */
	uint64_t size;           /* total bytes in bo */

	struct list_head vram_list; /* sets allocated from this pool */
};
| 573 | |
/* One VkDescriptorUpdateTemplateEntry, pre-resolved into destination
 * offsets/strides so template application is a straight copy loop. */
struct radv_descriptor_update_template_entry {
	VkDescriptorType descriptor_type;

	/* The number of descriptors to update */
	uint16_t descriptor_count;

	/* Into mapped_ptr or dynamic_descriptors, in units of the respective array */
	uint16_t dst_offset;

	/* In dwords. Not valid/used for dynamic descriptors */
	uint16_t dst_stride;

	/* Into the set's trailing buffer-object array. */
	uint16_t buffer_offset;
	uint16_t buffer_count;

	/* Only valid for combined image samplers and samplers */
	uint16_t has_sampler;

	/* In bytes */
	size_t src_offset;
	size_t src_stride;

	/* For push descriptors */
	const uint32_t *immutable_samplers;
};
| 599 | |
/* VkDescriptorUpdateTemplateKHR: flat array of pre-resolved entries. */
struct radv_descriptor_update_template {
	uint32_t entry_count;
	struct radv_descriptor_update_template_entry entry[0]; /* trailing array */
};
| 604 | |
/* VkBuffer implementation; memory is attached at bind time. */
struct radv_buffer {
	struct radv_device *                          device;
	VkDeviceSize                                 size;

	VkBufferUsageFlags                           usage;
	VkBufferCreateFlags                          flags;

	/* Set when bound */
	struct radeon_winsys_bo *                      bo;
	VkDeviceSize                                 offset; /* offset of this buffer within bo */
};
| 616 | |
| 617 | |
/* Bits tracking which command-buffer state must be re-emitted before the
 * next draw.  The DYNAMIC_* bits mirror the corresponding VK_DYNAMIC_STATE
 * enums and double as the copy_mask for radv_dynamic_state_copy(). */
enum radv_cmd_dirty_bits {
	RADV_CMD_DIRTY_DYNAMIC_VIEWPORT                  = 1 << 0, /* VK_DYNAMIC_STATE_VIEWPORT */
	RADV_CMD_DIRTY_DYNAMIC_SCISSOR                   = 1 << 1, /* VK_DYNAMIC_STATE_SCISSOR */
	RADV_CMD_DIRTY_DYNAMIC_LINE_WIDTH                = 1 << 2, /* VK_DYNAMIC_STATE_LINE_WIDTH */
	RADV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS                = 1 << 3, /* VK_DYNAMIC_STATE_DEPTH_BIAS */
	RADV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS           = 1 << 4, /* VK_DYNAMIC_STATE_BLEND_CONSTANTS */
	RADV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS              = 1 << 5, /* VK_DYNAMIC_STATE_DEPTH_BOUNDS */
	RADV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK      = 1 << 6, /* VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK */
	RADV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK        = 1 << 7, /* VK_DYNAMIC_STATE_STENCIL_WRITE_MASK */
	RADV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE         = 1 << 8, /* VK_DYNAMIC_STATE_STENCIL_REFERENCE */
	RADV_CMD_DIRTY_DYNAMIC_ALL                       = (1 << 9) - 1, /* all DYNAMIC_* bits above */
	RADV_CMD_DIRTY_PIPELINE                          = 1 << 9,
	RADV_CMD_DIRTY_INDEX_BUFFER                      = 1 << 10,
	RADV_CMD_DIRTY_RENDER_TARGETS                    = 1 << 11,
};
typedef uint32_t radv_cmd_dirty_mask_t;
| 634 | |
/* Pending cache-flush / synchronization work, accumulated in
 * radv_cmd_state.flush_bits and emitted before the next packet needing it. */
enum radv_cmd_flush_bits {
	RADV_CMD_FLAG_INV_ICACHE = 1 << 0,
	/* SMEM L1, other names: KCACHE, constant cache, DCACHE, data cache */
	RADV_CMD_FLAG_INV_SMEM_L1 = 1 << 1,
	/* VMEM L1 can optionally be bypassed (GLC=1). Other names: TC L1 */
	RADV_CMD_FLAG_INV_VMEM_L1 = 1 << 2,
	/* Used by everything except CB/DB, can be bypassed (SLC=1). Other names: TC L2 */
	RADV_CMD_FLAG_INV_GLOBAL_L2 = 1 << 3,
	/* Same as above, but only writes back and doesn't invalidate */
	RADV_CMD_FLAG_WRITEBACK_GLOBAL_L2 = 1 << 4,
	/* Framebuffer caches */
	RADV_CMD_FLAG_FLUSH_AND_INV_CB_META = 1 << 5,
	RADV_CMD_FLAG_FLUSH_AND_INV_DB_META = 1 << 6,
	RADV_CMD_FLAG_FLUSH_AND_INV_DB = 1 << 7,
	RADV_CMD_FLAG_FLUSH_AND_INV_CB = 1 << 8,
	/* Engine synchronization. */
	RADV_CMD_FLAG_VS_PARTIAL_FLUSH = 1 << 9,
	RADV_CMD_FLAG_PS_PARTIAL_FLUSH = 1 << 10,
	RADV_CMD_FLAG_CS_PARTIAL_FLUSH = 1 << 11,
	RADV_CMD_FLAG_VGT_FLUSH = 1 << 12,

	/* Convenience union of all framebuffer (color + depth) cache flushes. */
	RADV_CMD_FLUSH_AND_INV_FRAMEBUFFER = (RADV_CMD_FLAG_FLUSH_AND_INV_CB |
					      RADV_CMD_FLAG_FLUSH_AND_INV_CB_META |
					      RADV_CMD_FLAG_FLUSH_AND_INV_DB |
					      RADV_CMD_FLAG_FLUSH_AND_INV_DB_META)
};
| 661 | |
/* One bound vertex buffer (vkCmdBindVertexBuffers). */
struct radv_vertex_binding {
	struct radv_buffer *                          buffer;
	VkDeviceSize                                 offset;
};
| 666 | |
/* All Vulkan dynamic state, whether set via pipeline or vkCmdSet*. */
struct radv_dynamic_state {
	struct {
		uint32_t                                  count; /* valid entries in viewports[] */
		VkViewport                                viewports[MAX_VIEWPORTS];
	} viewport;

	struct {
		uint32_t                                  count; /* valid entries in scissors[] */
		VkRect2D                                  scissors[MAX_SCISSORS];
	} scissor;

	float                                        line_width;

	struct {
		float                                     bias;  /* constant factor */
		float                                     clamp;
		float                                     slope; /* slope-scaled factor */
	} depth_bias;

	float                                        blend_constants[4]; /* RGBA */

	struct {
		float                                     min;
		float                                     max;
	} depth_bounds;

	/* Front/back-face stencil state follows. */
	struct {
		uint32_t                                  front;
		uint32_t                                  back;
	} stencil_compare_mask;

	struct {
		uint32_t                                  front;
		uint32_t                                  back;
	} stencil_write_mask;

	struct {
		uint32_t                                  front;
		uint32_t                                  back;
	} stencil_reference;
};
| 708 | |
| 709 | extern const struct radv_dynamic_state default_dynamic_state; |
| 710 | |
| 711 | void radv_dynamic_state_copy(struct radv_dynamic_state *dest, |
| 712 | const struct radv_dynamic_state *src, |
| 713 | uint32_t copy_mask); |
| 714 | /** |
| 715 | * Attachment state when recording a renderpass instance. |
| 716 | * |
| 717 | * The clear value is valid only if there exists a pending clear. |
| 718 | */ |
struct radv_attachment_state {
	VkImageAspectFlags                           pending_clear_aspects; /* aspects still needing their load-op clear */
	VkClearValue                                 clear_value;           /* valid only while a clear is pending */
	VkImageLayout                                current_layout;        /* tracked layout within the render pass */
};
| 724 | |
/* Mutable state tracked while recording a command buffer. */
struct radv_cmd_state {
	uint32_t                                      vb_dirty; /* bitmask of vertex bindings needing re-emit */
	radv_cmd_dirty_mask_t                         dirty;    /* radv_cmd_dirty_bits */
	bool                                          vertex_descriptors_dirty;
	bool                                          push_descriptors_dirty;

	struct radv_pipeline *                        pipeline;          /* currently bound graphics pipeline */
	struct radv_pipeline *                        emitted_pipeline;  /* last one actually emitted to the CS */
	struct radv_pipeline *                        compute_pipeline;
	struct radv_pipeline *                        emitted_compute_pipeline;
	struct radv_framebuffer *                     framebuffer;
	struct radv_render_pass *                     pass;
	const struct radv_subpass *                   subpass; /* subpass currently being recorded */
	struct radv_dynamic_state                     dynamic;
	struct radv_vertex_binding                    vertex_bindings[MAX_VBS];
	struct radv_descriptor_set *                  descriptors[MAX_SETS];
	struct radv_attachment_state *                attachments; /* per-attachment state for the active pass */
	VkRect2D                                     render_area;
	struct radv_buffer *                         index_buffer;
	uint32_t                                     index_type;
	uint32_t                                     index_offset;
	uint32_t                                     last_primitive_reset_index;
	enum radv_cmd_flush_bits                     flush_bits; /* pending cache flushes */
	unsigned                                     active_occlusion_queries;
	float					     offset_scale;
	uint32_t                                      descriptors_dirty; /* bitmask of descriptor sets needing re-emit */
	uint32_t                                      trace_id; /* last marker written to the device trace buffer — verify against trace code */
	uint32_t                                      last_ia_multi_vgt_param;
};
Dave Airlie | 94a7434 | 2016-12-01 00:05:29 +0000 | [diff] [blame] | 754 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 755 | struct radv_cmd_pool { |
| 756 | VkAllocationCallbacks alloc; |
| 757 | struct list_head cmd_buffers; |
Bas Nieuwenhuizen | 682248d | 2017-03-05 22:25:20 +0100 | [diff] [blame] | 758 | struct list_head free_cmd_buffers; |
Dave Airlie | 94a7434 | 2016-12-01 00:05:29 +0000 | [diff] [blame] | 759 | uint32_t queue_family_index; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 760 | }; |
| 761 | |
/* A chunk of upload space used during command-buffer recording;
 * `list` chains additional chunks.
 */
struct radv_cmd_buffer_upload {
	uint8_t *map;                       /* CPU mapping of upload_bo -- presumably */
	unsigned offset;                    /* current allocation offset within the chunk */
	uint64_t size;                      /* total byte size of the chunk */
	struct radeon_winsys_bo *upload_bo;
	struct list_head list;
};
| 769 | |
/* Backing for VkCommandBuffer: the winsys command stream being recorded
 * plus all recording-time state.
 */
struct radv_cmd_buffer {
	VK_LOADER_DATA _loader_data;   /* Vulkan loader-private data; convention says this stays first -- TODO confirm */

	struct radv_device * device;

	struct radv_cmd_pool * pool;
	struct list_head pool_link;    /* link in the owning pool's list */

	VkCommandBufferUsageFlags usage_flags;
	VkCommandBufferLevel level;    /* primary or secondary */
	struct radeon_winsys_cs *cs;   /* command stream the packets go into */
	struct radv_cmd_state state;
	uint32_t queue_family_index;

	uint8_t push_constants[MAX_PUSH_CONSTANTS_SIZE];   /* CPU shadow of the push-constant block */
	uint32_t dynamic_buffers[4 * MAX_DYNAMIC_BUFFERS];
	VkShaderStageFlags push_constant_stages;
	struct radv_push_descriptor_set push_descriptors;

	struct radv_cmd_buffer_upload upload;   /* scratch upload space for this buffer */

	bool record_fail;   /* set when recording hit an error -- presumably reported at end-of-recording */

	/* Resource sizes accumulated while recording, resolved at submit: */
	uint32_t scratch_size_needed;
	uint32_t compute_scratch_size_needed;
	uint32_t esgs_ring_size_needed;
	uint32_t gsvs_ring_size_needed;
	bool tess_rings_needed;
	bool sample_positions_needed;

	int ring_offsets_idx; /* just used for verification */
};
| 802 | |
| 803 | struct radv_image; |
| 804 | |
Dave Airlie | 94a7434 | 2016-12-01 00:05:29 +0000 | [diff] [blame] | 805 | bool radv_cmd_buffer_uses_mec(struct radv_cmd_buffer *cmd_buffer); |
| 806 | |
Dave Airlie | 604e562 | 2017-02-13 03:35:37 +0000 | [diff] [blame] | 807 | void si_init_compute(struct radv_cmd_buffer *cmd_buffer); |
| 808 | void si_init_config(struct radv_cmd_buffer *cmd_buffer); |
Dave Airlie | 592069c | 2017-02-13 04:00:24 +0000 | [diff] [blame] | 809 | |
| 810 | void cik_create_gfx_config(struct radv_device *device); |
| 811 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 812 | void si_write_viewport(struct radeon_winsys_cs *cs, int first_vp, |
| 813 | int count, const VkViewport *viewports); |
| 814 | void si_write_scissors(struct radeon_winsys_cs *cs, int first, |
Bas Nieuwenhuizen | 0f3de89 | 2017-03-01 09:32:19 +0100 | [diff] [blame] | 815 | int count, const VkRect2D *scissors, |
| 816 | const VkViewport *viewports, bool can_use_guardband); |
Dave Airlie | 3360dbe | 2017-02-13 07:30:29 +0000 | [diff] [blame] | 817 | uint32_t si_get_ia_multi_vgt_param(struct radv_cmd_buffer *cmd_buffer, |
Dave Airlie | ae0551b | 2017-03-28 05:53:50 +1000 | [diff] [blame] | 818 | bool instanced_draw, bool indirect_draw, |
| 819 | uint32_t draw_vertex_count); |
Bas Nieuwenhuizen | 5241fb0 | 2017-02-20 09:26:00 +0100 | [diff] [blame] | 820 | void si_cs_emit_cache_flush(struct radeon_winsys_cs *cs, |
| 821 | enum chip_class chip_class, |
| 822 | bool is_mec, |
| 823 | enum radv_cmd_flush_bits flush_bits); |
| 824 | void si_cs_emit_cache_flush(struct radeon_winsys_cs *cs, |
| 825 | enum chip_class chip_class, |
| 826 | bool is_mec, |
| 827 | enum radv_cmd_flush_bits flush_bits); |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 828 | void si_emit_cache_flush(struct radv_cmd_buffer *cmd_buffer); |
| 829 | void si_cp_dma_buffer_copy(struct radv_cmd_buffer *cmd_buffer, |
| 830 | uint64_t src_va, uint64_t dest_va, |
| 831 | uint64_t size); |
| 832 | void si_cp_dma_clear_buffer(struct radv_cmd_buffer *cmd_buffer, uint64_t va, |
| 833 | uint64_t size, unsigned value); |
| 834 | void radv_set_db_count_control(struct radv_cmd_buffer *cmd_buffer); |
| 835 | void radv_bind_descriptor_set(struct radv_cmd_buffer *cmd_buffer, |
| 836 | struct radv_descriptor_set *set, |
| 837 | unsigned idx); |
| 838 | bool |
| 839 | radv_cmd_buffer_upload_alloc(struct radv_cmd_buffer *cmd_buffer, |
| 840 | unsigned size, |
| 841 | unsigned alignment, |
| 842 | unsigned *out_offset, |
| 843 | void **ptr); |
| 844 | void |
| 845 | radv_cmd_buffer_set_subpass(struct radv_cmd_buffer *cmd_buffer, |
| 846 | const struct radv_subpass *subpass, |
| 847 | bool transitions); |
| 848 | bool |
| 849 | radv_cmd_buffer_upload_data(struct radv_cmd_buffer *cmd_buffer, |
| 850 | unsigned size, unsigned alignmnet, |
| 851 | const void *data, unsigned *out_offset); |
| 852 | void |
| 853 | radv_emit_framebuffer_state(struct radv_cmd_buffer *cmd_buffer); |
| 854 | void radv_cmd_buffer_clear_subpass(struct radv_cmd_buffer *cmd_buffer); |
| 855 | void radv_cmd_buffer_resolve_subpass(struct radv_cmd_buffer *cmd_buffer); |
| 856 | void radv_cayman_emit_msaa_sample_locs(struct radeon_winsys_cs *cs, int nr_samples); |
| 857 | unsigned radv_cayman_get_maxdist(int log_samples); |
| 858 | void radv_device_init_msaa(struct radv_device *device); |
| 859 | void radv_set_depth_clear_regs(struct radv_cmd_buffer *cmd_buffer, |
| 860 | struct radv_image *image, |
| 861 | VkClearDepthStencilValue ds_clear_value, |
| 862 | VkImageAspectFlags aspects); |
| 863 | void radv_set_color_clear_regs(struct radv_cmd_buffer *cmd_buffer, |
| 864 | struct radv_image *image, |
| 865 | int idx, |
| 866 | uint32_t color_values[2]); |
| 867 | void radv_fill_buffer(struct radv_cmd_buffer *cmd_buffer, |
| 868 | struct radeon_winsys_bo *bo, |
| 869 | uint64_t offset, uint64_t size, uint32_t value); |
Bas Nieuwenhuizen | 97dfff5 | 2016-12-23 23:51:18 +0100 | [diff] [blame] | 870 | void radv_cmd_buffer_trace_emit(struct radv_cmd_buffer *cmd_buffer); |
Dave Airlie | 15f4702 | 2017-02-26 23:52:08 +0000 | [diff] [blame] | 871 | bool radv_get_memory_fd(struct radv_device *device, |
| 872 | struct radv_device_memory *memory, |
| 873 | int *pFD); |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 874 | /* |
| 875 | * Takes x,y,z as exact numbers of invocations, instead of blocks. |
| 876 | * |
| 877 | * Limitations: Can't call normal dispatch functions without binding or rebinding |
| 878 | * the compute pipeline. |
| 879 | */ |
| 880 | void radv_unaligned_dispatch( |
| 881 | struct radv_cmd_buffer *cmd_buffer, |
| 882 | uint32_t x, |
| 883 | uint32_t y, |
| 884 | uint32_t z); |
| 885 | |
/* Backing for VkEvent: a small GPU buffer plus its CPU mapping. */
struct radv_event {
	struct radeon_winsys_bo *bo;
	uint64_t *map;   /* CPU view of bo -- presumably holds the event status word */
};
| 890 | |
| 891 | struct nir_shader; |
| 892 | |
/* Backing for VkShaderModule: the shader blob provided at creation,
 * stored inline after the header fields.
 */
struct radv_shader_module {
	struct nir_shader * nir;
	unsigned char sha1[20];   /* SHA-1 digest of the module -- presumably a cache key */
	uint32_t size;            /* byte size of data[] */
	char data[0];             /* trailing blob (GNU zero-length array idiom) */
};
| 899 | |
| 900 | union ac_shader_variant_key; |
| 901 | |
| 902 | void |
| 903 | radv_hash_shader(unsigned char *hash, struct radv_shader_module *module, |
| 904 | const char *entrypoint, |
| 905 | const VkSpecializationInfo *spec_info, |
| 906 | const struct radv_pipeline_layout *layout, |
Dave Airlie | 99936d3 | 2017-01-20 09:55:37 +1000 | [diff] [blame] | 907 | const union ac_shader_variant_key *key, |
| 908 | uint32_t is_geom_copy_shader); |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 909 | |
| 910 | static inline gl_shader_stage |
| 911 | vk_to_mesa_shader_stage(VkShaderStageFlagBits vk_stage) |
| 912 | { |
| 913 | assert(__builtin_popcount(vk_stage) == 1); |
| 914 | return ffs(vk_stage) - 1; |
| 915 | } |
| 916 | |
| 917 | static inline VkShaderStageFlagBits |
| 918 | mesa_to_vk_shader_stage(gl_shader_stage mesa_stage) |
| 919 | { |
| 920 | return (1 << mesa_stage); |
| 921 | } |
| 922 | |
/* Bitmask covering every mesa shader stage. */
#define RADV_STAGE_MASK ((1 << MESA_SHADER_STAGES) - 1)

/* Iterate `stage` over each mesa stage whose bit is set in `stage_bits`.
 * `__tmp` holds the remaining bits; the comma expression in the condition
 * first assigns the next stage, then tests `__tmp`, so the loop ends once
 * all bits have been consumed.
 */
#define radv_foreach_stage(stage, stage_bits)				\
	for (gl_shader_stage stage,					\
		     __tmp = (gl_shader_stage)((stage_bits) & RADV_STAGE_MASK);	\
	     stage = __builtin_ffs(__tmp) - 1, __tmp;			\
	     __tmp &= ~(1 << (stage)))
| 930 | |
/* A compiled shader variant: reference-counted GPU code plus the
 * compiler-derived configuration.
 */
struct radv_shader_variant {
	uint32_t ref_count;   /* manual reference count */

	struct radeon_winsys_bo *bo;   /* GPU buffer holding the machine code -- presumably */
	struct ac_shader_config config;
	struct ac_shader_variant_info info;
	unsigned rsrc1;       /* shader resource register values -- TODO confirm which SPI regs */
	unsigned rsrc2;
	uint32_t code_size;   /* byte size of the compiled code */
};
| 941 | |
| 942 | struct radv_depth_stencil_state { |
| 943 | uint32_t db_depth_control; |
| 944 | uint32_t db_stencil_control; |
| 945 | uint32_t db_render_control; |
| 946 | uint32_t db_render_override2; |
| 947 | }; |
| 948 | |
| 949 | struct radv_blend_state { |
| 950 | uint32_t cb_color_control; |
| 951 | uint32_t cb_target_mask; |
| 952 | uint32_t sx_mrt0_blend_opt[8]; |
| 953 | uint32_t cb_blend_control[8]; |
| 954 | |
| 955 | uint32_t spi_shader_col_format; |
| 956 | uint32_t cb_shader_mask; |
| 957 | uint32_t db_alpha_to_mask; |
| 958 | }; |
| 959 | |
| 960 | unsigned radv_format_meta_fs_key(VkFormat format); |
| 961 | |
| 962 | struct radv_raster_state { |
| 963 | uint32_t pa_cl_clip_cntl; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 964 | uint32_t spi_interp_control; |
| 965 | uint32_t pa_su_point_size; |
| 966 | uint32_t pa_su_point_minmax; |
| 967 | uint32_t pa_su_line_cntl; |
| 968 | uint32_t pa_su_vtx_cntl; |
| 969 | uint32_t pa_su_sc_mode_cntl; |
| 970 | }; |
| 971 | |
| 972 | struct radv_multisample_state { |
| 973 | uint32_t db_eqaa; |
| 974 | uint32_t pa_sc_line_cntl; |
| 975 | uint32_t pa_sc_mode_cntl_0; |
| 976 | uint32_t pa_sc_mode_cntl_1; |
| 977 | uint32_t pa_sc_aa_config; |
| 978 | uint32_t pa_sc_aa_mask[2]; |
| 979 | unsigned num_samples; |
| 980 | }; |
| 981 | |
Dave Airlie | 3360dbe | 2017-02-13 07:30:29 +0000 | [diff] [blame] | 982 | struct radv_prim_vertex_count { |
| 983 | uint8_t min; |
| 984 | uint8_t incr; |
| 985 | }; |
| 986 | |
Dave Airlie | 4c60c68 | 2017-03-30 08:18:13 +0100 | [diff] [blame] | 987 | struct radv_tessellation_state { |
| 988 | uint32_t ls_hs_config; |
| 989 | uint32_t tcs_in_layout; |
| 990 | uint32_t tcs_out_layout; |
| 991 | uint32_t tcs_out_offsets; |
| 992 | uint32_t offchip_layout; |
| 993 | unsigned num_patches; |
| 994 | unsigned lds_size; |
| 995 | unsigned num_tcs_input_cp; |
| 996 | uint32_t tf_param; |
| 997 | }; |
| 998 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 999 | struct radv_pipeline { |
| 1000 | struct radv_device * device; |
| 1001 | uint32_t dynamic_state_mask; |
| 1002 | struct radv_dynamic_state dynamic_state; |
| 1003 | |
| 1004 | struct radv_pipeline_layout * layout; |
| 1005 | |
| 1006 | bool needs_data_cache; |
| 1007 | |
| 1008 | struct radv_shader_variant * shaders[MESA_SHADER_STAGES]; |
Dave Airlie | 99936d3 | 2017-01-20 09:55:37 +1000 | [diff] [blame] | 1009 | struct radv_shader_variant *gs_copy_shader; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1010 | VkShaderStageFlags active_stages; |
| 1011 | |
| 1012 | uint32_t va_rsrc_word3[MAX_VERTEX_ATTRIBS]; |
| 1013 | uint32_t va_format_size[MAX_VERTEX_ATTRIBS]; |
| 1014 | uint32_t va_binding[MAX_VERTEX_ATTRIBS]; |
| 1015 | uint32_t va_offset[MAX_VERTEX_ATTRIBS]; |
| 1016 | uint32_t num_vertex_attribs; |
| 1017 | uint32_t binding_stride[MAX_VBS]; |
| 1018 | |
| 1019 | union { |
| 1020 | struct { |
| 1021 | struct radv_blend_state blend; |
| 1022 | struct radv_depth_stencil_state ds; |
| 1023 | struct radv_raster_state raster; |
| 1024 | struct radv_multisample_state ms; |
Dave Airlie | 4c60c68 | 2017-03-30 08:18:13 +0100 | [diff] [blame] | 1025 | struct radv_tessellation_state tess; |
Dave Airlie | 8996fdb | 2017-03-28 11:34:19 +1000 | [diff] [blame] | 1026 | uint32_t db_shader_control; |
Dave Airlie | 4b467c7 | 2017-03-28 11:34:46 +1000 | [diff] [blame] | 1027 | uint32_t shader_z_format; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1028 | unsigned prim; |
| 1029 | unsigned gs_out; |
Dave Airlie | cd33a5c | 2017-03-28 11:33:35 +1000 | [diff] [blame] | 1030 | uint32_t vgt_gs_mode; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1031 | bool prim_restart_enable; |
Dave Airlie | 8f41fe4 | 2017-01-20 10:21:19 +1000 | [diff] [blame] | 1032 | unsigned esgs_ring_size; |
| 1033 | unsigned gsvs_ring_size; |
Dave Airlie | 92e9c14 | 2017-03-28 11:43:48 +1000 | [diff] [blame] | 1034 | uint32_t ps_input_cntl[32]; |
| 1035 | uint32_t ps_input_cntl_num; |
Dave Airlie | 0232ea8 | 2017-03-28 11:48:38 +1000 | [diff] [blame] | 1036 | uint32_t pa_cl_vs_out_cntl; |
Dave Airlie | 239a922 | 2017-03-28 12:59:17 +1000 | [diff] [blame] | 1037 | uint32_t vgt_shader_stages_en; |
Dave Airlie | 3360dbe | 2017-02-13 07:30:29 +0000 | [diff] [blame] | 1038 | struct radv_prim_vertex_count prim_vertex_count; |
Bas Nieuwenhuizen | 8a53e6e4 | 2017-03-29 22:58:10 +0200 | [diff] [blame] | 1039 | bool can_use_guardband; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1040 | } graphics; |
| 1041 | }; |
Bas Nieuwenhuizen | ccff93e | 2017-01-29 15:20:03 +0100 | [diff] [blame] | 1042 | |
| 1043 | unsigned max_waves; |
| 1044 | unsigned scratch_bytes_per_wave; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1045 | }; |
| 1046 | |
Dave Airlie | f26fa87 | 2017-01-18 13:50:16 +1000 | [diff] [blame] | 1047 | static inline bool radv_pipeline_has_gs(struct radv_pipeline *pipeline) |
| 1048 | { |
| 1049 | return pipeline->shaders[MESA_SHADER_GEOMETRY] ? true : false; |
| 1050 | } |
| 1051 | |
Dave Airlie | 0604284 | 2017-03-30 07:44:20 +0100 | [diff] [blame] | 1052 | static inline bool radv_pipeline_has_tess(struct radv_pipeline *pipeline) |
| 1053 | { |
| 1054 | return pipeline->shaders[MESA_SHADER_TESS_EVAL] ? true : false; |
| 1055 | } |
| 1056 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1057 | struct radv_graphics_pipeline_create_info { |
| 1058 | bool use_rectlist; |
| 1059 | bool db_depth_clear; |
| 1060 | bool db_stencil_clear; |
| 1061 | bool db_depth_disable_expclear; |
| 1062 | bool db_stencil_disable_expclear; |
| 1063 | bool db_flush_depth_inplace; |
| 1064 | bool db_flush_stencil_inplace; |
| 1065 | bool db_resummarize; |
| 1066 | uint32_t custom_blend_mode; |
| 1067 | }; |
| 1068 | |
| 1069 | VkResult |
| 1070 | radv_pipeline_init(struct radv_pipeline *pipeline, struct radv_device *device, |
| 1071 | struct radv_pipeline_cache *cache, |
| 1072 | const VkGraphicsPipelineCreateInfo *pCreateInfo, |
| 1073 | const struct radv_graphics_pipeline_create_info *extra, |
| 1074 | const VkAllocationCallbacks *alloc); |
| 1075 | |
| 1076 | VkResult |
| 1077 | radv_graphics_pipeline_create(VkDevice device, |
| 1078 | VkPipelineCache cache, |
| 1079 | const VkGraphicsPipelineCreateInfo *pCreateInfo, |
| 1080 | const struct radv_graphics_pipeline_create_info *extra, |
| 1081 | const VkAllocationCallbacks *alloc, |
| 1082 | VkPipeline *pPipeline); |
| 1083 | |
| 1084 | struct vk_format_description; |
| 1085 | uint32_t radv_translate_buffer_dataformat(const struct vk_format_description *desc, |
| 1086 | int first_non_void); |
| 1087 | uint32_t radv_translate_buffer_numformat(const struct vk_format_description *desc, |
| 1088 | int first_non_void); |
| 1089 | uint32_t radv_translate_colorformat(VkFormat format); |
| 1090 | uint32_t radv_translate_color_numformat(VkFormat format, |
| 1091 | const struct vk_format_description *desc, |
| 1092 | int first_non_void); |
| 1093 | uint32_t radv_colorformat_endian_swap(uint32_t colorformat); |
| 1094 | unsigned radv_translate_colorswap(VkFormat format, bool do_endian_swap); |
| 1095 | uint32_t radv_translate_dbformat(VkFormat format); |
| 1096 | uint32_t radv_translate_tex_dataformat(VkFormat format, |
| 1097 | const struct vk_format_description *desc, |
| 1098 | int first_non_void); |
| 1099 | uint32_t radv_translate_tex_numformat(VkFormat format, |
| 1100 | const struct vk_format_description *desc, |
| 1101 | int first_non_void); |
| 1102 | bool radv_format_pack_clear_color(VkFormat format, |
| 1103 | uint32_t clear_vals[2], |
| 1104 | VkClearColorValue *value); |
| 1105 | bool radv_is_colorbuffer_format_supported(VkFormat format, bool *blendable); |
| 1106 | |
| 1107 | struct radv_fmask_info { |
| 1108 | uint64_t offset; |
| 1109 | uint64_t size; |
| 1110 | unsigned alignment; |
| 1111 | unsigned pitch_in_pixels; |
| 1112 | unsigned bank_height; |
| 1113 | unsigned slice_tile_max; |
| 1114 | unsigned tile_mode_index; |
| 1115 | }; |
| 1116 | |
| 1117 | struct radv_cmask_info { |
| 1118 | uint64_t offset; |
| 1119 | uint64_t size; |
| 1120 | unsigned alignment; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1121 | unsigned slice_tile_max; |
| 1122 | unsigned base_address_reg; |
| 1123 | }; |
| 1124 | |
| 1125 | struct r600_htile_info { |
| 1126 | uint64_t offset; |
| 1127 | uint64_t size; |
| 1128 | unsigned pitch; |
| 1129 | unsigned height; |
| 1130 | unsigned xalign; |
| 1131 | unsigned yalign; |
| 1132 | }; |
| 1133 | |
| 1134 | struct radv_image { |
| 1135 | VkImageType type; |
| 1136 | /* The original VkFormat provided by the client. This may not match any |
| 1137 | * of the actual surface formats. |
| 1138 | */ |
| 1139 | VkFormat vk_format; |
| 1140 | VkImageAspectFlags aspects; |
| 1141 | VkExtent3D extent; |
| 1142 | uint32_t levels; |
| 1143 | uint32_t array_size; |
| 1144 | uint32_t samples; /**< VkImageCreateInfo::samples */ |
| 1145 | VkImageUsageFlags usage; /**< Superset of VkImageCreateInfo::usage. */ |
| 1146 | VkImageTiling tiling; /** VkImageCreateInfo::tiling */ |
Bas Nieuwenhuizen | 6154efc | 2017-02-04 15:56:20 +0100 | [diff] [blame] | 1147 | VkImageCreateFlags flags; /** VkImageCreateInfo::flags */ |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1148 | |
| 1149 | VkDeviceSize size; |
| 1150 | uint32_t alignment; |
| 1151 | |
Bas Nieuwenhuizen | accc5fc | 2016-12-17 21:25:32 +0100 | [diff] [blame] | 1152 | bool exclusive; |
| 1153 | unsigned queue_family_mask; |
| 1154 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1155 | /* Set when bound */ |
| 1156 | struct radeon_winsys_bo *bo; |
| 1157 | VkDeviceSize offset; |
| 1158 | uint32_t dcc_offset; |
Bas Nieuwenhuizen | 3b455c1 | 2017-03-07 00:58:04 +0100 | [diff] [blame] | 1159 | uint32_t htile_offset; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1160 | struct radeon_surf surface; |
| 1161 | |
| 1162 | struct radv_fmask_info fmask; |
| 1163 | struct radv_cmask_info cmask; |
| 1164 | uint32_t clear_value_offset; |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1165 | }; |
| 1166 | |
| 1167 | bool radv_layout_has_htile(const struct radv_image *image, |
| 1168 | VkImageLayout layout); |
| 1169 | bool radv_layout_is_htile_compressed(const struct radv_image *image, |
| 1170 | VkImageLayout layout); |
| 1171 | bool radv_layout_can_expclear(const struct radv_image *image, |
| 1172 | VkImageLayout layout); |
Bas Nieuwenhuizen | 059af25 | 2016-12-27 00:57:36 +0100 | [diff] [blame] | 1173 | bool radv_layout_can_fast_clear(const struct radv_image *image, |
| 1174 | VkImageLayout layout, |
| 1175 | unsigned queue_mask); |
Bas Nieuwenhuizen | accc5fc | 2016-12-17 21:25:32 +0100 | [diff] [blame] | 1176 | |
| 1177 | |
Dave Airlie | cda9f3d | 2017-01-31 15:18:33 +1000 | [diff] [blame] | 1178 | unsigned radv_image_queue_family_mask(const struct radv_image *image, uint32_t family, uint32_t queue_family); |
Bas Nieuwenhuizen | accc5fc | 2016-12-17 21:25:32 +0100 | [diff] [blame] | 1179 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1180 | static inline uint32_t |
| 1181 | radv_get_layerCount(const struct radv_image *image, |
| 1182 | const VkImageSubresourceRange *range) |
| 1183 | { |
| 1184 | return range->layerCount == VK_REMAINING_ARRAY_LAYERS ? |
| 1185 | image->array_size - range->baseArrayLayer : range->layerCount; |
| 1186 | } |
| 1187 | |
| 1188 | static inline uint32_t |
| 1189 | radv_get_levelCount(const struct radv_image *image, |
| 1190 | const VkImageSubresourceRange *range) |
| 1191 | { |
| 1192 | return range->levelCount == VK_REMAINING_MIP_LEVELS ? |
| 1193 | image->levels - range->baseMipLevel : range->levelCount; |
| 1194 | } |
| 1195 | |
| 1196 | struct radeon_bo_metadata; |
| 1197 | void |
| 1198 | radv_init_metadata(struct radv_device *device, |
| 1199 | struct radv_image *image, |
| 1200 | struct radeon_bo_metadata *metadata); |
| 1201 | |
| 1202 | struct radv_image_view { |
| 1203 | struct radv_image *image; /**< VkImageViewCreateInfo::image */ |
| 1204 | struct radeon_winsys_bo *bo; |
| 1205 | |
| 1206 | VkImageViewType type; |
| 1207 | VkImageAspectFlags aspect_mask; |
| 1208 | VkFormat vk_format; |
| 1209 | uint32_t base_layer; |
| 1210 | uint32_t layer_count; |
| 1211 | uint32_t base_mip; |
| 1212 | VkExtent3D extent; /**< Extent of VkImageViewCreateInfo::baseMipLevel. */ |
| 1213 | |
| 1214 | uint32_t descriptor[8]; |
| 1215 | uint32_t fmask_descriptor[8]; |
| 1216 | }; |
| 1217 | |
| 1218 | struct radv_image_create_info { |
| 1219 | const VkImageCreateInfo *vk_info; |
| 1220 | uint32_t stride; |
| 1221 | bool scanout; |
| 1222 | }; |
| 1223 | |
| 1224 | VkResult radv_image_create(VkDevice _device, |
| 1225 | const struct radv_image_create_info *info, |
| 1226 | const VkAllocationCallbacks* alloc, |
| 1227 | VkImage *pImage); |
| 1228 | |
| 1229 | void radv_image_view_init(struct radv_image_view *view, |
| 1230 | struct radv_device *device, |
| 1231 | const VkImageViewCreateInfo* pCreateInfo, |
| 1232 | struct radv_cmd_buffer *cmd_buffer, |
| 1233 | VkImageUsageFlags usage_mask); |
| 1234 | void radv_image_set_optimal_micro_tile_mode(struct radv_device *device, |
| 1235 | struct radv_image *image, uint32_t micro_tile_mode); |
| 1236 | struct radv_buffer_view { |
| 1237 | struct radeon_winsys_bo *bo; |
| 1238 | VkFormat vk_format; |
| 1239 | uint64_t range; /**< VkBufferViewCreateInfo::range */ |
| 1240 | uint32_t state[4]; |
| 1241 | }; |
| 1242 | void radv_buffer_view_init(struct radv_buffer_view *view, |
| 1243 | struct radv_device *device, |
| 1244 | const VkBufferViewCreateInfo* pCreateInfo, |
| 1245 | struct radv_cmd_buffer *cmd_buffer); |
| 1246 | |
| 1247 | static inline struct VkExtent3D |
| 1248 | radv_sanitize_image_extent(const VkImageType imageType, |
| 1249 | const struct VkExtent3D imageExtent) |
| 1250 | { |
| 1251 | switch (imageType) { |
| 1252 | case VK_IMAGE_TYPE_1D: |
| 1253 | return (VkExtent3D) { imageExtent.width, 1, 1 }; |
| 1254 | case VK_IMAGE_TYPE_2D: |
| 1255 | return (VkExtent3D) { imageExtent.width, imageExtent.height, 1 }; |
| 1256 | case VK_IMAGE_TYPE_3D: |
| 1257 | return imageExtent; |
| 1258 | default: |
| 1259 | unreachable("invalid image type"); |
| 1260 | } |
| 1261 | } |
| 1262 | |
| 1263 | static inline struct VkOffset3D |
| 1264 | radv_sanitize_image_offset(const VkImageType imageType, |
| 1265 | const struct VkOffset3D imageOffset) |
| 1266 | { |
| 1267 | switch (imageType) { |
| 1268 | case VK_IMAGE_TYPE_1D: |
| 1269 | return (VkOffset3D) { imageOffset.x, 0, 0 }; |
| 1270 | case VK_IMAGE_TYPE_2D: |
| 1271 | return (VkOffset3D) { imageOffset.x, imageOffset.y, 0 }; |
| 1272 | case VK_IMAGE_TYPE_3D: |
| 1273 | return imageOffset; |
| 1274 | default: |
| 1275 | unreachable("invalid image type"); |
| 1276 | } |
| 1277 | } |
| 1278 | |
| 1279 | struct radv_sampler { |
| 1280 | uint32_t state[4]; |
| 1281 | }; |
| 1282 | |
| 1283 | struct radv_color_buffer_info { |
| 1284 | uint32_t cb_color_base; |
| 1285 | uint32_t cb_color_pitch; |
| 1286 | uint32_t cb_color_slice; |
| 1287 | uint32_t cb_color_view; |
| 1288 | uint32_t cb_color_info; |
| 1289 | uint32_t cb_color_attrib; |
| 1290 | uint32_t cb_dcc_control; |
| 1291 | uint32_t cb_color_cmask; |
| 1292 | uint32_t cb_color_cmask_slice; |
| 1293 | uint32_t cb_color_fmask; |
| 1294 | uint32_t cb_color_fmask_slice; |
| 1295 | uint32_t cb_clear_value0; |
| 1296 | uint32_t cb_clear_value1; |
| 1297 | uint32_t cb_dcc_base; |
| 1298 | uint32_t micro_tile_mode; |
| 1299 | }; |
| 1300 | |
| 1301 | struct radv_ds_buffer_info { |
| 1302 | uint32_t db_depth_info; |
| 1303 | uint32_t db_z_info; |
| 1304 | uint32_t db_stencil_info; |
| 1305 | uint32_t db_z_read_base; |
| 1306 | uint32_t db_stencil_read_base; |
| 1307 | uint32_t db_z_write_base; |
| 1308 | uint32_t db_stencil_write_base; |
| 1309 | uint32_t db_depth_view; |
| 1310 | uint32_t db_depth_size; |
| 1311 | uint32_t db_depth_slice; |
| 1312 | uint32_t db_htile_surface; |
| 1313 | uint32_t db_htile_data_base; |
| 1314 | uint32_t pa_su_poly_offset_db_fmt_cntl; |
| 1315 | float offset_scale; |
| 1316 | }; |
| 1317 | |
| 1318 | struct radv_attachment_info { |
| 1319 | union { |
| 1320 | struct radv_color_buffer_info cb; |
| 1321 | struct radv_ds_buffer_info ds; |
| 1322 | }; |
| 1323 | struct radv_image_view *attachment; |
| 1324 | }; |
| 1325 | |
| 1326 | struct radv_framebuffer { |
| 1327 | uint32_t width; |
| 1328 | uint32_t height; |
| 1329 | uint32_t layers; |
| 1330 | |
| 1331 | uint32_t attachment_count; |
| 1332 | struct radv_attachment_info attachments[0]; |
| 1333 | }; |
| 1334 | |
| 1335 | struct radv_subpass_barrier { |
| 1336 | VkPipelineStageFlags src_stage_mask; |
| 1337 | VkAccessFlags src_access_mask; |
| 1338 | VkAccessFlags dst_access_mask; |
| 1339 | }; |
| 1340 | |
| 1341 | struct radv_subpass { |
| 1342 | uint32_t input_count; |
| 1343 | VkAttachmentReference * input_attachments; |
| 1344 | uint32_t color_count; |
| 1345 | VkAttachmentReference * color_attachments; |
| 1346 | VkAttachmentReference * resolve_attachments; |
| 1347 | VkAttachmentReference depth_stencil_attachment; |
| 1348 | |
| 1349 | /** Subpass has at least one resolve attachment */ |
| 1350 | bool has_resolve; |
| 1351 | |
| 1352 | struct radv_subpass_barrier start_barrier; |
| 1353 | }; |
| 1354 | |
| 1355 | struct radv_render_pass_attachment { |
| 1356 | VkFormat format; |
| 1357 | uint32_t samples; |
| 1358 | VkAttachmentLoadOp load_op; |
| 1359 | VkAttachmentLoadOp stencil_load_op; |
| 1360 | VkImageLayout initial_layout; |
| 1361 | VkImageLayout final_layout; |
| 1362 | }; |
| 1363 | |
| 1364 | struct radv_render_pass { |
| 1365 | uint32_t attachment_count; |
| 1366 | uint32_t subpass_count; |
| 1367 | VkAttachmentReference * subpass_attachments; |
| 1368 | struct radv_render_pass_attachment * attachments; |
| 1369 | struct radv_subpass_barrier end_barrier; |
| 1370 | struct radv_subpass subpasses[0]; |
| 1371 | }; |
| 1372 | |
| 1373 | VkResult radv_device_init_meta(struct radv_device *device); |
| 1374 | void radv_device_finish_meta(struct radv_device *device); |
| 1375 | |
| 1376 | struct radv_query_pool { |
| 1377 | struct radeon_winsys_bo *bo; |
| 1378 | uint32_t stride; |
| 1379 | uint32_t availability_offset; |
| 1380 | char *ptr; |
| 1381 | VkQueryType type; |
| 1382 | }; |
| 1383 | |
| 1384 | VkResult |
| 1385 | radv_temp_descriptor_set_create(struct radv_device *device, |
| 1386 | struct radv_cmd_buffer *cmd_buffer, |
| 1387 | VkDescriptorSetLayout _layout, |
| 1388 | VkDescriptorSet *_set); |
| 1389 | |
| 1390 | void |
| 1391 | radv_temp_descriptor_set_destroy(struct radv_device *device, |
| 1392 | VkDescriptorSet _set); |
Fredrik Höglund | a6e94a8 | 2017-03-29 18:08:06 +0200 | [diff] [blame] | 1393 | |
| 1394 | void |
| 1395 | radv_update_descriptor_sets(struct radv_device *device, |
| 1396 | struct radv_cmd_buffer *cmd_buffer, |
| 1397 | VkDescriptorSet overrideSet, |
| 1398 | uint32_t descriptorWriteCount, |
| 1399 | const VkWriteDescriptorSet *pDescriptorWrites, |
| 1400 | uint32_t descriptorCopyCount, |
| 1401 | const VkCopyDescriptorSet *pDescriptorCopies); |
| 1402 | |
Fredrik Höglund | c1f8c83 | 2017-03-29 19:19:47 +0200 | [diff] [blame] | 1403 | void |
| 1404 | radv_update_descriptor_set_with_template(struct radv_device *device, |
| 1405 | struct radv_cmd_buffer *cmd_buffer, |
| 1406 | struct radv_descriptor_set *set, |
| 1407 | VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, |
| 1408 | const void *pData); |
| 1409 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1410 | void radv_initialise_cmask(struct radv_cmd_buffer *cmd_buffer, |
| 1411 | struct radv_image *image, uint32_t value); |
| 1412 | void radv_initialize_dcc(struct radv_cmd_buffer *cmd_buffer, |
| 1413 | struct radv_image *image, uint32_t value); |
Fredrik Höglund | 0a153f4 | 2016-10-25 20:31:41 +0200 | [diff] [blame] | 1414 | |
/* Backing for VkFence: a winsys fence plus CPU-side status flags. */
struct radv_fence {
	struct radeon_winsys_fence *fence;
	bool submitted;   /* fence has been handed to a queue submission -- presumably */
	bool signalled;   /* fence is known signalled -- presumably a CPU-side cache */
};
| 1420 | |
Grazvydas Ignotas | 5458b02 | 2017-03-05 23:04:50 +0200 | [diff] [blame] | 1421 | struct radeon_winsys_sem; |
| 1422 | |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1423 | #define RADV_DEFINE_HANDLE_CASTS(__radv_type, __VkType) \ |
| 1424 | \ |
| 1425 | static inline struct __radv_type * \ |
| 1426 | __radv_type ## _from_handle(__VkType _handle) \ |
| 1427 | { \ |
| 1428 | return (struct __radv_type *) _handle; \ |
| 1429 | } \ |
| 1430 | \ |
| 1431 | static inline __VkType \ |
| 1432 | __radv_type ## _to_handle(struct __radv_type *_obj) \ |
| 1433 | { \ |
| 1434 | return (__VkType) _obj; \ |
| 1435 | } |
| 1436 | |
/* Defines the pair of inline casts between a non-dispatchable Vulkan
 * handle type and the radv struct implementing it.  Non-dispatchable
 * handles are 64-bit opaque values rather than pointers, hence the
 * round-trip through uintptr_t.
 *
 * The macro parameters avoid leading double underscores: identifiers
 * beginning with `__` are reserved for the implementation (C11 7.1.3).
 * The expansion is token-identical to the previous spelling, so all
 * existing instantiations are unaffected. */
#define RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_type_, VkType_)	\
								\
	static inline struct radv_type_ *			\
	radv_type_ ## _from_handle(VkType_ _handle)		\
	{							\
		return (struct radv_type_ *)(uintptr_t) _handle; \
	}							\
								\
	static inline VkType_					\
	radv_type_ ## _to_handle(struct radv_type_ *_obj)	\
	{							\
		return (VkType_)(uintptr_t) _obj;		\
	}
| 1450 | |
/* Declares and initializes a local driver-struct pointer from a Vulkan
 * handle, e.g.:  RADV_FROM_HANDLE(radv_device, device, _device);
 * Parameters avoid leading double underscores, which are reserved for
 * the implementation (C11 7.1.3); the expansion is unchanged. */
#define RADV_FROM_HANDLE(radv_type_, name_, handle_)		\
	struct radv_type_ *name_ = radv_type_ ## _from_handle(handle_)
| 1453 | |
| 1454 | RADV_DEFINE_HANDLE_CASTS(radv_cmd_buffer, VkCommandBuffer) |
| 1455 | RADV_DEFINE_HANDLE_CASTS(radv_device, VkDevice) |
| 1456 | RADV_DEFINE_HANDLE_CASTS(radv_instance, VkInstance) |
| 1457 | RADV_DEFINE_HANDLE_CASTS(radv_physical_device, VkPhysicalDevice) |
| 1458 | RADV_DEFINE_HANDLE_CASTS(radv_queue, VkQueue) |
| 1459 | |
| 1460 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_cmd_pool, VkCommandPool) |
| 1461 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_buffer, VkBuffer) |
| 1462 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_buffer_view, VkBufferView) |
| 1463 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_pool, VkDescriptorPool) |
| 1464 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_set, VkDescriptorSet) |
| 1465 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_set_layout, VkDescriptorSetLayout) |
Fredrik Höglund | c1f8c83 | 2017-03-29 19:19:47 +0200 | [diff] [blame] | 1466 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_update_template, VkDescriptorUpdateTemplateKHR) |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1467 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_device_memory, VkDeviceMemory) |
| 1468 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_fence, VkFence) |
| 1469 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_event, VkEvent) |
| 1470 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_framebuffer, VkFramebuffer) |
| 1471 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_image, VkImage) |
| 1472 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_image_view, VkImageView); |
| 1473 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_pipeline_cache, VkPipelineCache) |
| 1474 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_pipeline, VkPipeline) |
| 1475 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_pipeline_layout, VkPipelineLayout) |
| 1476 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_query_pool, VkQueryPool) |
| 1477 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_render_pass, VkRenderPass) |
| 1478 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_sampler, VkSampler) |
| 1479 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_shader_module, VkShaderModule) |
Grazvydas Ignotas | 5458b02 | 2017-03-05 23:04:50 +0200 | [diff] [blame] | 1480 | RADV_DEFINE_NONDISP_HANDLE_CASTS(radeon_winsys_sem, VkSemaphore) |
Dave Airlie | f4e499e | 2016-10-07 09:16:09 +1000 | [diff] [blame] | 1481 | |
Edward O'Callaghan | ba43768 | 2016-10-07 22:19:19 +1100 | [diff] [blame] | 1482 | #endif /* RADV_PRIVATE_H */ |