blob: f303878bc1e61f622ab60925e73d9ebbef8d5513 [file] [log] [blame]
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001/*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24#include <assert.h>
25#include <stdbool.h>
26#include <string.h>
27#include <unistd.h>
28#include <fcntl.h>
29
30#include "private.h"
31
/* Parse the environment variable NAME as an integer.
 *
 * Base is auto-detected by strtol (decimal, 0x-prefixed hex, 0-prefixed
 * octal).  An unset variable — or one that does not begin with a number —
 * yields 0.
 */
static int
anv_env_get_int(const char *name)
{
   const char *str = getenv(name);

   return str ? strtol(str, NULL, 0) : 0;
}
42
43static VkResult
44fill_physical_device(struct anv_physical_device *device,
45 struct anv_instance *instance,
46 const char *path)
47{
48 int fd;
49
50 fd = open("/dev/dri/renderD128", O_RDWR | O_CLOEXEC);
51 if (fd < 0)
52 return vk_error(VK_ERROR_UNAVAILABLE);
53
54 device->instance = instance;
55 device->path = path;
56
57 device->chipset_id = anv_env_get_int("INTEL_DEVID_OVERRIDE");
58 device->no_hw = false;
59 if (device->chipset_id) {
60 /* INTEL_DEVID_OVERRIDE implies INTEL_NO_HW. */
61 device->no_hw = true;
62 } else {
63 device->chipset_id = anv_gem_get_param(fd, I915_PARAM_CHIPSET_ID);
64 }
65 if (!device->chipset_id)
66 goto fail;
67
68 device->name = brw_get_device_name(device->chipset_id);
69 device->info = brw_get_device_info(device->chipset_id, -1);
70 if (!device->info)
71 goto fail;
72
73 if (!anv_gem_get_param(fd, I915_PARAM_HAS_WAIT_TIMEOUT))
74 goto fail;
75
76 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXECBUF2))
77 goto fail;
78
79 if (!anv_gem_get_param(fd, I915_PARAM_HAS_LLC))
80 goto fail;
81
82 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_CONSTANTS))
83 goto fail;
84
85 close(fd);
86
87 return VK_SUCCESS;
88
89 fail:
90 close(fd);
91
92 return vk_error(VK_ERROR_UNAVAILABLE);
93}
94
95static void *default_alloc(
96 void* pUserData,
97 size_t size,
98 size_t alignment,
99 VkSystemAllocType allocType)
100{
101 return malloc(size);
102}
103
/* Fallback free callback paired with default_alloc. */
static void default_free(
    void*                                       pUserData,
    void*                                       pMem)
{
   (void) pUserData;

   free(pMem);
}
110
/* Allocation callbacks used whenever VkInstanceCreateInfo::pAllocCb is
 * NULL; plain malloc/free with no user data.
 */
static const VkAllocCallbacks default_alloc_callbacks = {
   .pUserData = NULL,
   .pfnAlloc = default_alloc,
   .pfnFree = default_free
};
116
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700117VkResult anv_CreateInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700118 const VkInstanceCreateInfo* pCreateInfo,
119 VkInstance* pInstance)
120{
121 struct anv_instance *instance;
122 const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
123 void *user_data = NULL;
124 VkResult result;
125
126 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
127
128 if (pCreateInfo->pAllocCb) {
129 alloc_callbacks = pCreateInfo->pAllocCb;
130 user_data = pCreateInfo->pAllocCb->pUserData;
131 }
132 instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
133 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
134 if (!instance)
135 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
136
137 instance->pAllocUserData = alloc_callbacks->pUserData;
138 instance->pfnAlloc = alloc_callbacks->pfnAlloc;
139 instance->pfnFree = alloc_callbacks->pfnFree;
140 instance->apiVersion = pCreateInfo->pAppInfo->apiVersion;
141
142 instance->physicalDeviceCount = 0;
143 result = fill_physical_device(&instance->physicalDevice,
144 instance, "/dev/dri/renderD128");
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700145
Chad Versacea61f3072015-05-20 19:51:10 -0700146 if (result != VK_SUCCESS)
147 return result;
148
149 instance->physicalDeviceCount++;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700150 *pInstance = (VkInstance) instance;
151
152 return VK_SUCCESS;
153}
154
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700155VkResult anv_DestroyInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700156 VkInstance _instance)
157{
158 struct anv_instance *instance = (struct anv_instance *) _instance;
159
160 instance->pfnFree(instance->pAllocUserData, instance);
161
162 return VK_SUCCESS;
163}
164
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700165VkResult anv_EnumeratePhysicalDevices(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700166 VkInstance _instance,
167 uint32_t* pPhysicalDeviceCount,
168 VkPhysicalDevice* pPhysicalDevices)
169{
170 struct anv_instance *instance = (struct anv_instance *) _instance;
171
172 if (*pPhysicalDeviceCount >= 1)
173 pPhysicalDevices[0] = (VkPhysicalDevice) &instance->physicalDevice;
174 *pPhysicalDeviceCount = instance->physicalDeviceCount;
175
176 return VK_SUCCESS;
177}
178
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700179VkResult anv_GetPhysicalDeviceInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700180 VkPhysicalDevice physicalDevice,
181 VkPhysicalDeviceInfoType infoType,
182 size_t* pDataSize,
183 void* pData)
184{
185 struct anv_physical_device *device = (struct anv_physical_device *) physicalDevice;
186 VkPhysicalDeviceProperties *properties;
187 VkPhysicalDevicePerformance *performance;
188 VkPhysicalDeviceQueueProperties *queue_properties;
189 VkPhysicalDeviceMemoryProperties *memory_properties;
Kristian Høgsberga29df712015-05-15 22:04:52 -0700190 VkDisplayPropertiesWSI *display_properties;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700191 uint64_t ns_per_tick = 80;
192
Kristian Høgsberga29df712015-05-15 22:04:52 -0700193 switch ((uint32_t) infoType) {
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700194 case VK_PHYSICAL_DEVICE_INFO_TYPE_PROPERTIES:
195 properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700196
197 *pDataSize = sizeof(*properties);
198 if (pData == NULL)
199 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700200
201 properties->apiVersion = 1;
202 properties->driverVersion = 1;
203 properties->vendorId = 0x8086;
204 properties->deviceId = device->chipset_id;
205 properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
206 strcpy(properties->deviceName, device->name);
207 properties->maxInlineMemoryUpdateSize = 0;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700208 properties->maxBoundDescriptorSets = MAX_SETS;
209 properties->maxThreadGroupSize = 512;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700210 properties->timestampFrequency = 1000 * 1000 * 1000 / ns_per_tick;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700211 properties->multiColorAttachmentClears = true;
212 properties->maxDescriptorSets = 8;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700213 properties->maxViewports = 16;
214 properties->maxColorAttachments = 8;
215 return VK_SUCCESS;
216
217 case VK_PHYSICAL_DEVICE_INFO_TYPE_PERFORMANCE:
218 performance = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700219
220 *pDataSize = sizeof(*performance);
221 if (pData == NULL)
222 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700223
224 performance->maxDeviceClock = 1.0;
225 performance->aluPerClock = 1.0;
226 performance->texPerClock = 1.0;
227 performance->primsPerClock = 1.0;
228 performance->pixelsPerClock = 1.0;
229 return VK_SUCCESS;
230
231 case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PROPERTIES:
232 queue_properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700233
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700234 *pDataSize = sizeof(*queue_properties);
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700235 if (pData == NULL)
236 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700237
238 queue_properties->queueFlags = 0;
239 queue_properties->queueCount = 1;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700240 queue_properties->supportsTimestamps = true;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700241 return VK_SUCCESS;
242
243 case VK_PHYSICAL_DEVICE_INFO_TYPE_MEMORY_PROPERTIES:
244 memory_properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700245
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700246 *pDataSize = sizeof(*memory_properties);
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700247 if (pData == NULL)
248 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700249
250 memory_properties->supportsMigration = false;
251 memory_properties->supportsPinning = false;
252 return VK_SUCCESS;
253
Kristian Høgsberga29df712015-05-15 22:04:52 -0700254 case VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI:
255 anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI");
256
257 *pDataSize = sizeof(*display_properties);
258 if (pData == NULL)
259 return VK_SUCCESS;
260
261 display_properties = pData;
262 display_properties->display = 0;
263 display_properties->physicalResolution = (VkExtent2D) { 0, 0 };
264 return VK_SUCCESS;
265
266 case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI:
267 anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI");
268 return VK_SUCCESS;
269
270
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700271 default:
272 return VK_UNSUPPORTED;
273 }
274
275}
276
Jason Ekstrande7acdda2015-07-07 18:51:53 -0700277PFN_vkVoidFunction anv_GetInstanceProcAddr(
278 VkInstance instance,
279 const char* pName)
280{
281 return anv_lookup_entrypoint(pName);
282}
283
284PFN_vkVoidFunction anv_GetDeviceProcAddr(
285 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700286 const char* pName)
287{
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700288 return anv_lookup_entrypoint(pName);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700289}
290
291static void
292parse_debug_flags(struct anv_device *device)
293{
294 const char *debug, *p, *end;
295
296 debug = getenv("INTEL_DEBUG");
297 device->dump_aub = false;
298 if (debug) {
299 for (p = debug; *p; p = end + 1) {
300 end = strchrnul(p, ',');
301 if (end - p == 3 && memcmp(p, "aub", 3) == 0)
302 device->dump_aub = true;
303 if (end - p == 5 && memcmp(p, "no_hw", 5) == 0)
304 device->no_hw = true;
305 if (*end == '\0')
306 break;
307 }
308 }
309}
310
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700311static VkResult
312anv_queue_init(struct anv_device *device, struct anv_queue *queue)
313{
314 queue->device = device;
315 queue->pool = &device->surface_state_pool;
316
317 queue->completed_serial = anv_state_pool_alloc(queue->pool, 4, 4);
318 if (queue->completed_serial.map == NULL)
319 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
320
321 *(uint32_t *)queue->completed_serial.map = 0;
322 queue->next_serial = 1;
323
324 return VK_SUCCESS;
325}
326
/* Release per-queue resources.
 *
 * The completed_serial state is backed by a device-wide pool that is torn
 * down with the device, so an explicit free is only needed to keep
 * valgrind's accounting clean.
 */
static void
anv_queue_finish(struct anv_queue *queue)
{
#ifdef HAVE_VALGRIND
   anv_state_pool_free(queue->pool, queue->completed_serial);
#endif
}
337
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -0700338static void
339anv_device_init_border_colors(struct anv_device *device)
340{
Jason Ekstrand522ab832015-07-08 11:44:52 -0700341 static const VkClearColorValue border_colors[] = {
342 [VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 0.0 } },
343 [VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 1.0 } },
344 [VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE] = { .f32 = { 1.0, 1.0, 1.0, 1.0 } },
345 [VK_BORDER_COLOR_INT_TRANSPARENT_BLACK] = { .u32 = { 0, 0, 0, 0 } },
346 [VK_BORDER_COLOR_INT_OPAQUE_BLACK] = { .u32 = { 0, 0, 0, 1 } },
347 [VK_BORDER_COLOR_INT_OPAQUE_WHITE] = { .u32 = { 1, 1, 1, 1 } },
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -0700348 };
349
Jason Ekstrand522ab832015-07-08 11:44:52 -0700350 device->border_colors =
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -0700351 anv_state_pool_alloc(&device->dynamic_state_pool,
Jason Ekstrand522ab832015-07-08 11:44:52 -0700352 sizeof(border_colors), 32);
353 memcpy(device->border_colors.map, border_colors, sizeof(border_colors));
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -0700354}
355
Jason Ekstrand730ca0e2015-05-28 10:20:18 -0700356static const uint32_t BATCH_SIZE = 8192;
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700357
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700358VkResult anv_CreateDevice(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700359 VkPhysicalDevice _physicalDevice,
360 const VkDeviceCreateInfo* pCreateInfo,
361 VkDevice* pDevice)
362{
363 struct anv_physical_device *physicalDevice =
364 (struct anv_physical_device *) _physicalDevice;
365 struct anv_instance *instance = physicalDevice->instance;
366 struct anv_device *device;
367
368 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
369
370 device = instance->pfnAlloc(instance->pAllocUserData,
371 sizeof(*device), 8,
372 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
373 if (!device)
374 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
375
376 device->no_hw = physicalDevice->no_hw;
377 parse_debug_flags(device);
378
379 device->instance = physicalDevice->instance;
380 device->fd = open("/dev/dri/renderD128", O_RDWR | O_CLOEXEC);
381 if (device->fd == -1)
382 goto fail_device;
383
384 device->context_id = anv_gem_create_context(device);
385 if (device->context_id == -1)
386 goto fail_fd;
387
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700388 anv_bo_pool_init(&device->batch_bo_pool, device, BATCH_SIZE);
389
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700390 anv_block_pool_init(&device->dynamic_state_block_pool, device, 2048);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700391
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700392 anv_state_pool_init(&device->dynamic_state_pool,
393 &device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700394
395 anv_block_pool_init(&device->instruction_block_pool, device, 2048);
396 anv_block_pool_init(&device->surface_state_block_pool, device, 2048);
397
398 anv_state_pool_init(&device->surface_state_pool,
399 &device->surface_state_block_pool);
400
Kristian Høgsberg Kristensen9b9f9732015-06-19 15:41:30 -0700401 anv_block_pool_init(&device->scratch_block_pool, device, 0x10000);
402
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700403 device->info = *physicalDevice->info;
404
Kristian Høgsberg Kristensen9eab70e2015-06-03 23:03:29 -0700405 device->compiler = anv_compiler_create(device);
406 device->aub_writer = NULL;
407
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700408 pthread_mutex_init(&device->mutex, NULL);
409
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700410 anv_queue_init(device, &device->queue);
411
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -0700412 anv_device_init_meta(device);
413
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -0700414 anv_device_init_border_colors(device);
415
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700416 *pDevice = (VkDevice) device;
417
418 return VK_SUCCESS;
419
420 fail_fd:
421 close(device->fd);
422 fail_device:
423 anv_device_free(device, device);
424
425 return vk_error(VK_ERROR_UNAVAILABLE);
426}
427
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700428VkResult anv_DestroyDevice(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700429 VkDevice _device)
430{
431 struct anv_device *device = (struct anv_device *) _device;
432
433 anv_compiler_destroy(device->compiler);
434
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700435 anv_queue_finish(&device->queue);
436
Jason Ekstrand3a38b0d2015-06-09 11:08:51 -0700437 anv_device_finish_meta(device);
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700438
Jason Ekstrand38f5eef2015-06-09 11:41:31 -0700439#ifdef HAVE_VALGRIND
440 /* We only need to free these to prevent valgrind errors. The backing
441 * BO will go away in a couple of lines so we don't actually leak.
442 */
Jason Ekstrand522ab832015-07-08 11:44:52 -0700443 anv_state_pool_free(&device->dynamic_state_pool, device->border_colors);
Jason Ekstrand38f5eef2015-06-09 11:41:31 -0700444#endif
445
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700446 anv_bo_pool_finish(&device->batch_bo_pool);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700447 anv_block_pool_finish(&device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700448 anv_block_pool_finish(&device->instruction_block_pool);
449 anv_block_pool_finish(&device->surface_state_block_pool);
450
451 close(device->fd);
452
453 if (device->aub_writer)
454 anv_aub_writer_destroy(device->aub_writer);
455
456 anv_device_free(device, device);
457
458 return VK_SUCCESS;
459}
460
/* The instance-level extensions this driver advertises. */
static const VkExtensionProperties global_extensions[] = {
   {
      .extName = "VK_WSI_LunarG",
      .version = 3
   }
};
467
468VkResult anv_GetGlobalExtensionCount(
469 uint32_t* pCount)
470{
471 *pCount = ARRAY_SIZE(global_extensions);
472
473 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700474}
475
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700476
477VkResult anv_GetGlobalExtensionProperties(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700478 uint32_t extensionIndex,
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700479 VkExtensionProperties* pProperties)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700480{
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700481 assert(extensionIndex < ARRAY_SIZE(global_extensions));
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700482
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700483 *pProperties = global_extensions[extensionIndex];
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700484
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700485 return VK_SUCCESS;
486}
487
488VkResult anv_GetPhysicalDeviceExtensionCount(
489 VkPhysicalDevice physicalDevice,
490 uint32_t* pCount)
491{
492 /* None supported at this time */
493 *pCount = 0;
494
495 return VK_SUCCESS;
496}
497
498VkResult anv_GetPhysicalDeviceExtensionProperties(
499 VkPhysicalDevice physicalDevice,
500 uint32_t extensionIndex,
501 VkExtensionProperties* pProperties)
502{
503 /* None supported at this time */
504 return vk_error(VK_ERROR_INVALID_EXTENSION);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700505}
506
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700507VkResult anv_EnumerateLayers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700508 VkPhysicalDevice physicalDevice,
509 size_t maxStringSize,
510 size_t* pLayerCount,
511 char* const* pOutLayers,
512 void* pReserved)
513{
514 *pLayerCount = 0;
515
516 return VK_SUCCESS;
517}
518
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700519VkResult anv_GetDeviceQueue(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700520 VkDevice _device,
521 uint32_t queueNodeIndex,
522 uint32_t queueIndex,
523 VkQueue* pQueue)
524{
525 struct anv_device *device = (struct anv_device *) _device;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700526
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700527 assert(queueIndex == 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700528
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700529 *pQueue = (VkQueue) &device->queue;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700530
531 return VK_SUCCESS;
532}
533
Jason Ekstrand59def432015-05-27 11:41:28 -0700534VkResult
Jason Ekstrand403266b2015-05-25 17:38:15 -0700535anv_reloc_list_init(struct anv_reloc_list *list, struct anv_device *device)
536{
537 list->num_relocs = 0;
538 list->array_length = 256;
539 list->relocs =
540 anv_device_alloc(device, list->array_length * sizeof(*list->relocs), 8,
541 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
542
543 if (list->relocs == NULL)
544 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
545
546 list->reloc_bos =
547 anv_device_alloc(device, list->array_length * sizeof(*list->reloc_bos), 8,
548 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
549
550 if (list->relocs == NULL) {
551 anv_device_free(device, list->relocs);
552 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
553 }
554
555 return VK_SUCCESS;
556}
557
Jason Ekstrand59def432015-05-27 11:41:28 -0700558void
Jason Ekstrand403266b2015-05-25 17:38:15 -0700559anv_reloc_list_finish(struct anv_reloc_list *list, struct anv_device *device)
560{
561 anv_device_free(device, list->relocs);
562 anv_device_free(device, list->reloc_bos);
563}
564
565static VkResult
566anv_reloc_list_grow(struct anv_reloc_list *list, struct anv_device *device,
567 size_t num_additional_relocs)
568{
569 if (list->num_relocs + num_additional_relocs <= list->array_length)
570 return VK_SUCCESS;
571
572 size_t new_length = list->array_length * 2;
573 while (new_length < list->num_relocs + num_additional_relocs)
574 new_length *= 2;
575
576 struct drm_i915_gem_relocation_entry *new_relocs =
577 anv_device_alloc(device, new_length * sizeof(*list->relocs), 8,
578 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
579 if (new_relocs == NULL)
580 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
581
582 struct anv_bo **new_reloc_bos =
583 anv_device_alloc(device, new_length * sizeof(*list->reloc_bos), 8,
584 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
585 if (new_relocs == NULL) {
586 anv_device_free(device, new_relocs);
587 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
588 }
589
590 memcpy(new_relocs, list->relocs, list->num_relocs * sizeof(*list->relocs));
591 memcpy(new_reloc_bos, list->reloc_bos,
592 list->num_relocs * sizeof(*list->reloc_bos));
593
594 anv_device_free(device, list->relocs);
595 anv_device_free(device, list->reloc_bos);
596
597 list->relocs = new_relocs;
598 list->reloc_bos = new_reloc_bos;
599
600 return VK_SUCCESS;
601}
602
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700603static VkResult
604anv_batch_bo_create(struct anv_device *device, struct anv_batch_bo **bbo_out)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700605{
606 VkResult result;
607
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700608 struct anv_batch_bo *bbo =
609 anv_device_alloc(device, sizeof(*bbo), 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
610 if (bbo == NULL)
611 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700612
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700613 bbo->num_relocs = 0;
614 bbo->prev_batch_bo = NULL;
615
616 result = anv_bo_pool_alloc(&device->batch_bo_pool, &bbo->bo);
Jason Ekstrand403266b2015-05-25 17:38:15 -0700617 if (result != VK_SUCCESS) {
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700618 anv_device_free(device, bbo);
Jason Ekstrand403266b2015-05-25 17:38:15 -0700619 return result;
620 }
621
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700622 *bbo_out = bbo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700623
624 return VK_SUCCESS;
625}
626
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700627static void
628anv_batch_bo_start(struct anv_batch_bo *bbo, struct anv_batch *batch,
629 size_t batch_padding)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700630{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700631 batch->next = batch->start = bbo->bo.map;
632 batch->end = bbo->bo.map + bbo->bo.size - batch_padding;
633 bbo->first_reloc = batch->relocs.num_relocs;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700634}
635
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700636static void
637anv_batch_bo_finish(struct anv_batch_bo *bbo, struct anv_batch *batch)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700638{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700639 assert(batch->start == bbo->bo.map);
640 bbo->length = batch->next - batch->start;
Jason Ekstrand9cae3d12015-06-09 21:36:12 -0700641 VG(VALGRIND_CHECK_MEM_IS_DEFINED(batch->start, bbo->length));
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700642 bbo->num_relocs = batch->relocs.num_relocs - bbo->first_reloc;
643}
644
645static void
646anv_batch_bo_destroy(struct anv_batch_bo *bbo, struct anv_device *device)
647{
648 anv_bo_pool_free(&device->batch_bo_pool, &bbo->bo);
649 anv_device_free(device, bbo);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700650}
651
652void *
653anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords)
654{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700655 if (batch->next + num_dwords * 4 > batch->end)
656 batch->extend_cb(batch, batch->user_data);
657
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700658 void *p = batch->next;
659
660 batch->next += num_dwords * 4;
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700661 assert(batch->next <= batch->end);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700662
663 return p;
664}
665
666static void
Jason Ekstrand403266b2015-05-25 17:38:15 -0700667anv_reloc_list_append(struct anv_reloc_list *list, struct anv_device *device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700668 struct anv_reloc_list *other, uint32_t offset)
669{
Jason Ekstrand403266b2015-05-25 17:38:15 -0700670 anv_reloc_list_grow(list, device, other->num_relocs);
671 /* TODO: Handle failure */
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700672
Jason Ekstrand403266b2015-05-25 17:38:15 -0700673 memcpy(&list->relocs[list->num_relocs], &other->relocs[0],
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700674 other->num_relocs * sizeof(other->relocs[0]));
Jason Ekstrand403266b2015-05-25 17:38:15 -0700675 memcpy(&list->reloc_bos[list->num_relocs], &other->reloc_bos[0],
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700676 other->num_relocs * sizeof(other->reloc_bos[0]));
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700677
Jason Ekstrand403266b2015-05-25 17:38:15 -0700678 for (uint32_t i = 0; i < other->num_relocs; i++)
679 list->relocs[i + list->num_relocs].offset += offset;
680
681 list->num_relocs += other->num_relocs;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700682}
683
684static uint64_t
Jason Ekstrand403266b2015-05-25 17:38:15 -0700685anv_reloc_list_add(struct anv_reloc_list *list, struct anv_device *device,
686 uint32_t offset, struct anv_bo *target_bo, uint32_t delta)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700687{
688 struct drm_i915_gem_relocation_entry *entry;
689 int index;
690
Jason Ekstrand403266b2015-05-25 17:38:15 -0700691 anv_reloc_list_grow(list, device, 1);
692 /* TODO: Handle failure */
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700693
694 /* XXX: Can we use I915_EXEC_HANDLE_LUT? */
695 index = list->num_relocs++;
696 list->reloc_bos[index] = target_bo;
697 entry = &list->relocs[index];
698 entry->target_handle = target_bo->gem_handle;
699 entry->delta = delta;
700 entry->offset = offset;
701 entry->presumed_offset = target_bo->offset;
702 entry->read_domains = 0;
703 entry->write_domain = 0;
704
705 return target_bo->offset + delta;
706}
707
708void
709anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other)
710{
711 uint32_t size, offset;
712
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700713 size = other->next - other->start;
714 assert(size % 4 == 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700715
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700716 if (batch->next + size > batch->end)
717 batch->extend_cb(batch, batch->user_data);
718
719 assert(batch->next + size <= batch->end);
720
721 memcpy(batch->next, other->start, size);
722
723 offset = batch->next - batch->start;
724 anv_reloc_list_append(&batch->relocs, batch->device,
725 &other->relocs, offset);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700726
727 batch->next += size;
728}
729
730uint64_t
731anv_batch_emit_reloc(struct anv_batch *batch,
732 void *location, struct anv_bo *bo, uint32_t delta)
733{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700734 return anv_reloc_list_add(&batch->relocs, batch->device,
735 location - batch->start, bo, delta);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700736}
737
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700738VkResult anv_QueueSubmit(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700739 VkQueue _queue,
740 uint32_t cmdBufferCount,
741 const VkCmdBuffer* pCmdBuffers,
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700742 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700743{
744 struct anv_queue *queue = (struct anv_queue *) _queue;
745 struct anv_device *device = queue->device;
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700746 struct anv_fence *fence = (struct anv_fence *) _fence;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700747 int ret;
748
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700749 for (uint32_t i = 0; i < cmdBufferCount; i++) {
750 struct anv_cmd_buffer *cmd_buffer =
751 (struct anv_cmd_buffer *) pCmdBuffers[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700752
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700753 if (device->dump_aub)
754 anv_cmd_buffer_dump(cmd_buffer);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700755
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700756 if (!device->no_hw) {
757 ret = anv_gem_execbuffer(device, &cmd_buffer->execbuf);
758 if (ret != 0)
Kristian Høgsberg2b7a0602015-05-12 14:38:58 -0700759 return vk_error(VK_ERROR_UNKNOWN);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700760
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700761 if (fence) {
762 ret = anv_gem_execbuffer(device, &fence->execbuf);
763 if (ret != 0)
764 return vk_error(VK_ERROR_UNKNOWN);
765 }
766
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700767 for (uint32_t i = 0; i < cmd_buffer->bo_count; i++)
768 cmd_buffer->exec2_bos[i]->offset = cmd_buffer->exec2_objects[i].offset;
769 } else {
770 *(uint32_t *)queue->completed_serial.map = cmd_buffer->serial;
771 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700772 }
773
774 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700775}
776
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700777VkResult anv_QueueWaitIdle(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700778 VkQueue _queue)
779{
780 struct anv_queue *queue = (struct anv_queue *) _queue;
781
782 return vkDeviceWaitIdle((VkDevice) queue->device);
783}
784
/* Wait for the GPU to finish all previously submitted work.
 *
 * Implemented by building a tiny batch (MI_BATCH_BUFFER_END + MI_NOOP) in
 * the dynamic state pool, submitting it, and blocking on the backing BO
 * with an unbounded GEM wait.  In no-hw mode the submit and wait are
 * skipped entirely.
 */
VkResult anv_DeviceWaitIdle(
    VkDevice                                    _device)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_state state;
   struct anv_batch batch;
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 exec2_objects[1];
   struct anv_bo *bo = NULL;
   VkResult result;
   int64_t timeout;
   int ret;

   /* Carve 32 bytes out of the dynamic state pool for the dummy batch;
    * the pool's backing BO is what actually gets submitted.
    */
   state = anv_state_pool_alloc(&device->dynamic_state_pool, 32, 32);
   bo = &device->dynamic_state_pool.block_pool->bo;
   batch.start = batch.next = state.map;
   batch.end = state.map + 32;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   /* Single BO, no relocations; reuse the last offset the kernel gave us. */
   exec2_objects[0].handle = bo->gem_handle;
   exec2_objects[0].relocation_count = 0;
   exec2_objects[0].relocs_ptr = 0;
   exec2_objects[0].alignment = 0;
   exec2_objects[0].offset = bo->offset;
   exec2_objects[0].flags = 0;
   exec2_objects[0].rsvd1 = 0;
   exec2_objects[0].rsvd2 = 0;

   execbuf.buffers_ptr = (uintptr_t) exec2_objects;
   execbuf.buffer_count = 1;
   /* The batch starts at the state's offset within the pool BO. */
   execbuf.batch_start_offset = state.offset;
   execbuf.batch_len = batch.next - state.map;
   execbuf.cliprects_ptr = 0;
   execbuf.num_cliprects = 0;
   execbuf.DR1 = 0;
   execbuf.DR4 = 0;

   execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   execbuf.rsvd1 = device->context_id;
   execbuf.rsvd2 = 0;

   if (!device->no_hw) {
      ret = anv_gem_execbuffer(device, &execbuf);
      if (ret != 0) {
         result = vk_error(VK_ERROR_UNKNOWN);
         goto fail;
      }

      /* Block indefinitely until the dummy batch retires. */
      timeout = INT64_MAX;
      ret = anv_gem_wait(device, bo->gem_handle, &timeout);
      if (ret != 0) {
         result = vk_error(VK_ERROR_UNKNOWN);
         goto fail;
      }
   }

   anv_state_pool_free(&device->dynamic_state_pool, state);

   return VK_SUCCESS;

 fail:
   anv_state_pool_free(&device->dynamic_state_pool, state);

   return result;
}
852
853void *
854anv_device_alloc(struct anv_device * device,
855 size_t size,
856 size_t alignment,
857 VkSystemAllocType allocType)
858{
859 return device->instance->pfnAlloc(device->instance->pAllocUserData,
860 size,
861 alignment,
862 allocType);
863}
864
865void
866anv_device_free(struct anv_device * device,
867 void * mem)
868{
869 return device->instance->pfnFree(device->instance->pAllocUserData,
870 mem);
871}
872
873VkResult
874anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
875{
876 bo->gem_handle = anv_gem_create(device, size);
877 if (!bo->gem_handle)
878 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
879
880 bo->map = NULL;
881 bo->index = 0;
882 bo->offset = 0;
883 bo->size = size;
884
885 return VK_SUCCESS;
886}
887
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700888VkResult anv_AllocMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700889 VkDevice _device,
890 const VkMemoryAllocInfo* pAllocInfo,
891 VkDeviceMemory* pMem)
892{
893 struct anv_device *device = (struct anv_device *) _device;
894 struct anv_device_memory *mem;
895 VkResult result;
896
897 assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);
898
899 mem = anv_device_alloc(device, sizeof(*mem), 8,
900 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
901 if (mem == NULL)
902 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
903
904 result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
905 if (result != VK_SUCCESS)
906 goto fail;
907
908 *pMem = (VkDeviceMemory) mem;
909
910 return VK_SUCCESS;
911
912 fail:
913 anv_device_free(device, mem);
914
915 return result;
916}
917
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700918VkResult anv_FreeMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700919 VkDevice _device,
920 VkDeviceMemory _mem)
921{
922 struct anv_device *device = (struct anv_device *) _device;
923 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
924
925 if (mem->bo.map)
926 anv_gem_munmap(mem->bo.map, mem->bo.size);
927
928 if (mem->bo.gem_handle != 0)
929 anv_gem_close(device, mem->bo.gem_handle);
930
931 anv_device_free(device, mem);
932
933 return VK_SUCCESS;
934}
935
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700936VkResult anv_MapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700937 VkDevice _device,
938 VkDeviceMemory _mem,
939 VkDeviceSize offset,
940 VkDeviceSize size,
941 VkMemoryMapFlags flags,
942 void** ppData)
943{
944 struct anv_device *device = (struct anv_device *) _device;
945 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
946
947 /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
948 * takes a VkDeviceMemory pointer, it seems like only one map of the memory
949 * at a time is valid. We could just mmap up front and return an offset
950 * pointer here, but that may exhaust virtual memory on 32 bit
951 * userspace. */
952
953 mem->map = anv_gem_mmap(device, mem->bo.gem_handle, offset, size);
954 mem->map_size = size;
955
956 *ppData = mem->map;
957
958 return VK_SUCCESS;
959}
960
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700961VkResult anv_UnmapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700962 VkDevice _device,
963 VkDeviceMemory _mem)
964{
965 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
966
967 anv_gem_munmap(mem->map, mem->map_size);
968
969 return VK_SUCCESS;
970}
971
/* No-op on LLC platforms where CPU and GPU share a coherent cache
 * hierarchy; non-LLC platforms would need an explicit clflush here.
 */
VkResult anv_FlushMappedMemoryRanges(
    VkDevice                                    device,
    uint32_t                                    memRangeCount,
    const VkMappedMemoryRange*                  pMemRanges)
{
   /* clflush here for !llc platforms */

   return VK_SUCCESS;
}
981
/* Invalidate is symmetric with flush on this hardware, so forward to
 * anv_FlushMappedMemoryRanges (currently a no-op on LLC platforms).
 */
VkResult anv_InvalidateMappedMemoryRanges(
    VkDevice                                    device,
    uint32_t                                    memRangeCount,
    const VkMappedMemoryRange*                  pMemRanges)
{
   return anv_FlushMappedMemoryRanges(device, memRangeCount, pMemRanges);
}
989
/* Generic object destruction entry point: dispatch on the object type.
 * Objects fall into four groups: those with dedicated destroy paths,
 * plain allocations that are simply freed, anv_object-derived types that
 * carry their own destructor, and unimplemented stubs.
 */
VkResult anv_DestroyObject(
    VkDevice                                    _device,
    VkObjectType                                objType,
    VkObject                                    _object)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_object *object = (struct anv_object *) _object;

   switch (objType) {
   case VK_OBJECT_TYPE_INSTANCE:
      return anv_DestroyInstance((VkInstance) _object);

   case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
      /* We don't want to actually destroy physical devices */
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_DEVICE:
      assert(_device == (VkDevice) _object);
      return anv_DestroyDevice((VkDevice) _object);

   case VK_OBJECT_TYPE_QUEUE:
      /* TODO */
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_DEVICE_MEMORY:
      return anv_FreeMemory(_device, (VkDeviceMemory) _object);

   case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
      /* These are just dummys anyway, so we don't need to destroy them */
      return VK_SUCCESS;

   /* Plain anv_device_alloc allocations with no extra resources attached;
    * freeing the host memory is all that is needed.
    */
   case VK_OBJECT_TYPE_BUFFER:
   case VK_OBJECT_TYPE_IMAGE:
   case VK_OBJECT_TYPE_DEPTH_STENCIL_VIEW:
   case VK_OBJECT_TYPE_SHADER:
   case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
   case VK_OBJECT_TYPE_SAMPLER:
   case VK_OBJECT_TYPE_DESCRIPTOR_SET:
   case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
   case VK_OBJECT_TYPE_DYNAMIC_RS_STATE:
   case VK_OBJECT_TYPE_DYNAMIC_CB_STATE:
   case VK_OBJECT_TYPE_DYNAMIC_DS_STATE:
   case VK_OBJECT_TYPE_RENDER_PASS:
      /* These are trivially destroyable */
      anv_device_free(device, (void *) _object);
      return VK_SUCCESS;

   /* anv_object-derived types: each instance carries a destructor set at
    * creation time (e.g. anv_fence_destroy, anv_surface_view_destroy).
    */
   case VK_OBJECT_TYPE_COMMAND_BUFFER:
   case VK_OBJECT_TYPE_PIPELINE:
   case VK_OBJECT_TYPE_DYNAMIC_VP_STATE:
   case VK_OBJECT_TYPE_FENCE:
   case VK_OBJECT_TYPE_QUERY_POOL:
   case VK_OBJECT_TYPE_FRAMEBUFFER:
   case VK_OBJECT_TYPE_BUFFER_VIEW:
   case VK_OBJECT_TYPE_IMAGE_VIEW:
   case VK_OBJECT_TYPE_COLOR_ATTACHMENT_VIEW:
      (object->destructor)(device, object, objType);
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_SEMAPHORE:
   case VK_OBJECT_TYPE_EVENT:
      stub_return(VK_UNSUPPORTED);

   default:
      unreachable("Invalid object type");
   }
}
1057
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001058VkResult anv_GetObjectMemoryRequirements(
1059 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001060 VkObjectType objType,
1061 VkObject object,
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001062 VkMemoryRequirements* pMemoryRequirements)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001063{
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001064 pMemoryRequirements->memPropsAllowed =
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001065 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
Jason Ekstrand68fa7502015-07-06 17:32:28 -07001066 /* VK_MEMORY_PROPERTY_HOST_NON_COHERENT_BIT | */
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001067 /* VK_MEMORY_PROPERTY_HOST_UNCACHED_BIT | */
Jason Ekstrand65f9ccb2015-07-06 17:33:43 -07001068 VK_MEMORY_PROPERTY_HOST_WRITE_COMBINED_BIT;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001069
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001070 pMemoryRequirements->memPropsRequired = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001071
1072 switch (objType) {
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001073 case VK_OBJECT_TYPE_BUFFER: {
1074 struct anv_buffer *buffer = (struct anv_buffer *) object;
1075 pMemoryRequirements->size = buffer->size;
1076 pMemoryRequirements->alignment = 16;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001077 break;
1078 }
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001079 case VK_OBJECT_TYPE_IMAGE: {
1080 struct anv_image *image = (struct anv_image *) object;
1081 pMemoryRequirements->size = image->size;
1082 pMemoryRequirements->alignment = image->alignment;
1083 break;
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001084 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001085 default:
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001086 pMemoryRequirements->size = 0;
1087 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001088 }
1089
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001090 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001091}
1092
Jason Ekstrandbb6567f2015-07-08 09:04:16 -07001093VkResult anv_BindObjectMemory(
1094 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001095 VkObjectType objType,
1096 VkObject object,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001097 VkDeviceMemory _mem,
1098 VkDeviceSize memOffset)
1099{
1100 struct anv_buffer *buffer;
1101 struct anv_image *image;
1102 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
1103
1104 switch (objType) {
1105 case VK_OBJECT_TYPE_BUFFER:
1106 buffer = (struct anv_buffer *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001107 buffer->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001108 buffer->offset = memOffset;
1109 break;
1110 case VK_OBJECT_TYPE_IMAGE:
1111 image = (struct anv_image *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001112 image->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001113 image->offset = memOffset;
1114 break;
1115 default:
1116 break;
1117 }
Jason Ekstrandbb6567f2015-07-08 09:04:16 -07001118
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001119 return VK_SUCCESS;
1120}
1121
VkResult anv_QueueBindSparseBufferMemory(
    VkQueue                                     queue,
    VkBuffer                                    buffer,
    VkDeviceSize                                rangeOffset,
    VkDeviceSize                                rangeSize,
    VkDeviceMemory                              mem,
    VkDeviceSize                                memOffset)
{
   /* Sparse resources are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1132
VkResult anv_QueueBindSparseImageMemory(
    VkQueue                                     queue,
    VkImage                                     image,
    const VkImageMemoryBindInfo*                pBindInfo,
    VkDeviceMemory                              mem,
    VkDeviceSize                                memOffset)
{
   /* Sparse resources are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1142
Jason Ekstrand57153da2015-05-22 15:15:08 -07001143static void
1144anv_fence_destroy(struct anv_device *device,
1145 struct anv_object *object,
1146 VkObjectType obj_type)
1147{
1148 struct anv_fence *fence = (struct anv_fence *) object;
1149
1150 assert(obj_type == VK_OBJECT_TYPE_FENCE);
1151
1152 anv_gem_munmap(fence->bo.map, fence->bo.size);
1153 anv_gem_close(device, fence->bo.gem_handle);
1154 anv_device_free(device, fence);
1155}
1156
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001157VkResult anv_CreateFence(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001158 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001159 const VkFenceCreateInfo* pCreateInfo,
1160 VkFence* pFence)
1161{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001162 struct anv_device *device = (struct anv_device *) _device;
1163 struct anv_fence *fence;
1164 struct anv_batch batch;
1165 VkResult result;
1166
1167 const uint32_t fence_size = 128;
1168
1169 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);
1170
1171 fence = anv_device_alloc(device, sizeof(*fence), 8,
1172 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1173 if (fence == NULL)
1174 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1175
1176 result = anv_bo_init_new(&fence->bo, device, fence_size);
1177 if (result != VK_SUCCESS)
1178 goto fail;
1179
Jason Ekstrand57153da2015-05-22 15:15:08 -07001180 fence->base.destructor = anv_fence_destroy;
1181
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001182 fence->bo.map =
1183 anv_gem_mmap(device, fence->bo.gem_handle, 0, fence->bo.size);
Jason Ekstrandda8f1482015-05-27 11:42:55 -07001184 batch.next = batch.start = fence->bo.map;
1185 batch.end = fence->bo.map + fence->bo.size;
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001186 anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
1187 anv_batch_emit(&batch, GEN8_MI_NOOP);
1188
1189 fence->exec2_objects[0].handle = fence->bo.gem_handle;
1190 fence->exec2_objects[0].relocation_count = 0;
1191 fence->exec2_objects[0].relocs_ptr = 0;
1192 fence->exec2_objects[0].alignment = 0;
1193 fence->exec2_objects[0].offset = fence->bo.offset;
1194 fence->exec2_objects[0].flags = 0;
1195 fence->exec2_objects[0].rsvd1 = 0;
1196 fence->exec2_objects[0].rsvd2 = 0;
1197
1198 fence->execbuf.buffers_ptr = (uintptr_t) fence->exec2_objects;
1199 fence->execbuf.buffer_count = 1;
1200 fence->execbuf.batch_start_offset = 0;
1201 fence->execbuf.batch_len = batch.next - fence->bo.map;
1202 fence->execbuf.cliprects_ptr = 0;
1203 fence->execbuf.num_cliprects = 0;
1204 fence->execbuf.DR1 = 0;
1205 fence->execbuf.DR4 = 0;
1206
1207 fence->execbuf.flags =
1208 I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
1209 fence->execbuf.rsvd1 = device->context_id;
1210 fence->execbuf.rsvd2 = 0;
1211
Chad Versace87d98e12015-06-04 14:31:53 -07001212 *pFence = (VkFence) fence;
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001213
1214 return VK_SUCCESS;
1215
1216 fail:
1217 anv_device_free(device, fence);
1218
1219 return result;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001220}
1221
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001222VkResult anv_ResetFences(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001223 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001224 uint32_t fenceCount,
Jason Ekstrandd5349b12015-07-07 17:18:00 -07001225 const VkFence* pFences)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001226{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001227 struct anv_fence **fences = (struct anv_fence **) pFences;
1228
Kristian Høgsberg Kristensen52637c02015-06-05 11:51:30 -07001229 for (uint32_t i = 0; i < fenceCount; i++)
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001230 fences[i]->ready = false;
1231
1232 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001233}
1234
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001235VkResult anv_GetFenceStatus(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001236 VkDevice _device,
1237 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001238{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001239 struct anv_device *device = (struct anv_device *) _device;
1240 struct anv_fence *fence = (struct anv_fence *) _fence;
1241 int64_t t = 0;
1242 int ret;
1243
1244 if (fence->ready)
1245 return VK_SUCCESS;
1246
1247 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
1248 if (ret == 0) {
1249 fence->ready = true;
1250 return VK_SUCCESS;
1251 }
1252
1253 return VK_NOT_READY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001254}
1255
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001256VkResult anv_WaitForFences(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001257 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001258 uint32_t fenceCount,
1259 const VkFence* pFences,
1260 bool32_t waitAll,
1261 uint64_t timeout)
1262{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001263 ANV_FROM_HANDLE(anv_device, device, _device);
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001264 int64_t t = timeout;
1265 int ret;
1266
1267 /* FIXME: handle !waitAll */
1268
1269 for (uint32_t i = 0; i < fenceCount; i++) {
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001270 ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
1271 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001272 if (ret == -1 && errno == ETIME)
1273 return VK_TIMEOUT;
1274 else if (ret == -1)
1275 return vk_error(VK_ERROR_UNKNOWN);
1276 }
1277
1278 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001279}
1280
1281// Queue semaphore functions
1282
VkResult anv_CreateSemaphore(
    VkDevice                                    device,
    const VkSemaphoreCreateInfo*                pCreateInfo,
    VkSemaphore*                                pSemaphore)
{
   /* Queue semaphores are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1290
VkResult anv_QueueSignalSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   /* Queue semaphores are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1297
VkResult anv_QueueWaitSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   /* Queue semaphores are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1304
1305// Event functions
1306
VkResult anv_CreateEvent(
    VkDevice                                    device,
    const VkEventCreateInfo*                    pCreateInfo,
    VkEvent*                                    pEvent)
{
   /* Events are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1314
VkResult anv_GetEventStatus(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1321
VkResult anv_SetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1328
VkResult anv_ResetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1335
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001336// Buffer functions
1337
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001338VkResult anv_CreateBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001339 VkDevice _device,
1340 const VkBufferCreateInfo* pCreateInfo,
1341 VkBuffer* pBuffer)
1342{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001343 ANV_FROM_HANDLE(anv_device, device, _device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001344 struct anv_buffer *buffer;
1345
1346 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
1347
1348 buffer = anv_device_alloc(device, sizeof(*buffer), 8,
1349 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1350 if (buffer == NULL)
1351 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1352
1353 buffer->size = pCreateInfo->size;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001354 buffer->bo = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001355 buffer->offset = 0;
1356
1357 *pBuffer = (VkBuffer) buffer;
1358
1359 return VK_SUCCESS;
1360}
1361
1362// Buffer view functions
1363
/* Pack a GEN8 RENDER_SURFACE_STATE describing a typed buffer (SURFTYPE_BUFFER)
 * into `state`.  `offset` is the byte offset of the buffer data in the
 * surface-state address space; `range` is the view size in bytes.
 *
 * NOTE(review): the element stride is hard-coded to 4 bytes (see the FIXME-ish
 * comment below) even though the format's real element size is looked up into
 * `info` — verify whether info's per-element size should be used instead.
 */
static void
fill_buffer_surface_state(void *state, VkFormat format,
                          uint32_t offset, uint32_t range)
{
   const struct anv_format *info;

   info = anv_format_for_vk_format(format);
   /* This assumes RGBA float format. */
   uint32_t stride = 4;
   uint32_t num_elements = range / stride;

   struct GEN8_RENDER_SURFACE_STATE surface_state = {
      .SurfaceType = SURFTYPE_BUFFER,
      .SurfaceArray = false,
      .SurfaceFormat = info->surface_format,
      .SurfaceVerticalAlignment = VALIGN4,
      .SurfaceHorizontalAlignment = HALIGN4,
      .TileMode = LINEAR,
      .VerticalLineStride = 0,
      .VerticalLineStrideOffset = 0,
      .SamplerL2BypassModeDisable = true,
      .RenderCacheReadWriteMode = WriteOnlyCache,
      .MemoryObjectControlState = GEN8_MOCS,
      .BaseMipLevel = 0.0,
      .SurfaceQPitch = 0,
      /* Buffer surfaces split the element count across Width/Height/Depth
       * bit-fields (7 + 14 + 6 bits).  NOTE(review): hardware typically
       * expects (num_elements - 1) in this encoding — confirm.
       */
      .Height = (num_elements >> 7) & 0x3fff,
      .Width = num_elements & 0x7f,
      .Depth = (num_elements >> 21) & 0x3f,
      .SurfacePitch = stride - 1,
      .MinimumArrayElement = 0,
      .NumberofMultisamples = MULTISAMPLECOUNT_1,
      .XOffset = 0,
      .YOffset = 0,
      .SurfaceMinLOD = 0,
      .MIPCountLOD = 0,
      .AuxiliarySurfaceMode = AUX_NONE,
      .RedClearColor = 0,
      .GreenClearColor = 0,
      .BlueClearColor = 0,
      .AlphaClearColor = 0,
      /* Identity channel swizzle. */
      .ShaderChannelSelectRed = SCS_RED,
      .ShaderChannelSelectGreen = SCS_GREEN,
      .ShaderChannelSelectBlue = SCS_BLUE,
      .ShaderChannelSelectAlpha = SCS_ALPHA,
      .ResourceMinLOD = 0.0,
      /* FIXME: We assume that the image must be bound at this time. */
      .SurfaceBaseAddress = { NULL, offset },
   };

   GEN8_RENDER_SURFACE_STATE_pack(NULL, state, &surface_state);
}
1415
1416VkResult anv_CreateBufferView(
1417 VkDevice _device,
1418 const VkBufferViewCreateInfo* pCreateInfo,
1419 VkBufferView* pView)
1420{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001421 ANV_FROM_HANDLE(anv_device, device, _device);
1422 ANV_FROM_HANDLE(anv_buffer, buffer, pCreateInfo->buffer);
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001423 struct anv_surface_view *view;
1424
1425 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);
1426
1427 view = anv_device_alloc(device, sizeof(*view), 8,
1428 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1429 if (view == NULL)
1430 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1431
Jason Ekstrand9d6f55d2015-06-09 11:08:03 -07001432 view->base.destructor = anv_surface_view_destroy;
1433
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001434 view->bo = buffer->bo;
1435 view->offset = buffer->offset + pCreateInfo->offset;
1436 view->surface_state =
1437 anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
1438 view->format = pCreateInfo->format;
1439 view->range = pCreateInfo->range;
1440
1441 fill_buffer_surface_state(view->surface_state.map,
1442 pCreateInfo->format, view->offset, pCreateInfo->range);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001443
Chad Versace87d98e12015-06-04 14:31:53 -07001444 *pView = (VkBufferView) view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001445
1446 return VK_SUCCESS;
1447}
1448
1449// Sampler functions
1450
/* Create a sampler by translating the Vulkan sampler create info into a
 * packed GEN8 SAMPLER_STATE stored inline in the anv_sampler.
 */
VkResult anv_CreateSampler(
    VkDevice                                    _device,
    const VkSamplerCreateInfo*                  pCreateInfo,
    VkSampler*                                  pSampler)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_sampler *sampler;
   uint32_t mag_filter, min_filter, max_anisotropy;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);

   sampler = anv_device_alloc(device, sizeof(*sampler), 8,
                              VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!sampler)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* Vulkan enum -> GEN8 hardware encoding translation tables. */
   static const uint32_t vk_to_gen_tex_filter[] = {
      [VK_TEX_FILTER_NEAREST] = MAPFILTER_NEAREST,
      [VK_TEX_FILTER_LINEAR] = MAPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_mipmap_mode[] = {
      [VK_TEX_MIPMAP_MODE_BASE] = MIPFILTER_NONE,
      [VK_TEX_MIPMAP_MODE_NEAREST] = MIPFILTER_NEAREST,
      [VK_TEX_MIPMAP_MODE_LINEAR] = MIPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_tex_address[] = {
      [VK_TEX_ADDRESS_WRAP] = TCM_WRAP,
      [VK_TEX_ADDRESS_MIRROR] = TCM_MIRROR,
      [VK_TEX_ADDRESS_CLAMP] = TCM_CLAMP,
      [VK_TEX_ADDRESS_MIRROR_ONCE] = TCM_MIRROR_ONCE,
      [VK_TEX_ADDRESS_CLAMP_BORDER] = TCM_CLAMP_BORDER,
   };

   static const uint32_t vk_to_gen_compare_op[] = {
      [VK_COMPARE_OP_NEVER] = PREFILTEROPNEVER,
      [VK_COMPARE_OP_LESS] = PREFILTEROPLESS,
      [VK_COMPARE_OP_EQUAL] = PREFILTEROPEQUAL,
      [VK_COMPARE_OP_LESS_EQUAL] = PREFILTEROPLEQUAL,
      [VK_COMPARE_OP_GREATER] = PREFILTEROPGREATER,
      [VK_COMPARE_OP_NOT_EQUAL] = PREFILTEROPNOTEQUAL,
      [VK_COMPARE_OP_GREATER_EQUAL] = PREFILTEROPGEQUAL,
      [VK_COMPARE_OP_ALWAYS] = PREFILTEROPALWAYS,
   };

   /* Anisotropic filtering overrides the mag/min filters; the hardware
    * MaximumAnisotropy field encodes the ratio as (maxAnisotropy - 2) / 2.
    */
   if (pCreateInfo->maxAnisotropy > 1) {
      mag_filter = MAPFILTER_ANISOTROPIC;
      min_filter = MAPFILTER_ANISOTROPIC;
      max_anisotropy = (pCreateInfo->maxAnisotropy - 2) / 2;
   } else {
      mag_filter = vk_to_gen_tex_filter[pCreateInfo->magFilter];
      min_filter = vk_to_gen_tex_filter[pCreateInfo->minFilter];
      max_anisotropy = RATIO21;
   }

   struct GEN8_SAMPLER_STATE sampler_state = {
      .SamplerDisable = false,
      .TextureBorderColorMode = DX10OGL,
      .LODPreClampMode = 0,
      .BaseMipLevel = 0.0,
      .MipModeFilter = vk_to_gen_mipmap_mode[pCreateInfo->mipMode],
      .MagModeFilter = mag_filter,
      .MinModeFilter = min_filter,
      /* Hardware LOD bias is in signed fixed-point with 256 units per LOD. */
      .TextureLODBias = pCreateInfo->mipLodBias * 256,
      .AnisotropicAlgorithm = EWAApproximation,
      .MinLOD = pCreateInfo->minLod,
      .MaxLOD = pCreateInfo->maxLod,
      .ChromaKeyEnable = 0,
      .ChromaKeyIndex = 0,
      .ChromaKeyMode = 0,
      .ShadowFunction = vk_to_gen_compare_op[pCreateInfo->compareOp],
      .CubeSurfaceControlMode = 0,

      /* Border color is indexed into the pre-filled device-wide border
       * color table (four floats per entry).
       */
      .IndirectStatePointer =
         device->border_colors.offset +
         pCreateInfo->borderColor * sizeof(float) * 4,

      .LODClampMagnificationMode = MIPNONE,
      .MaximumAnisotropy = max_anisotropy,
      .RAddressMinFilterRoundingEnable = 0,
      .RAddressMagFilterRoundingEnable = 0,
      .VAddressMinFilterRoundingEnable = 0,
      .VAddressMagFilterRoundingEnable = 0,
      .UAddressMinFilterRoundingEnable = 0,
      .UAddressMagFilterRoundingEnable = 0,
      .TrilinearFilterQuality = 0,
      .NonnormalizedCoordinateEnable = 0,
      .TCXAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressU],
      .TCYAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressV],
      .TCZAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressW],
   };

   GEN8_SAMPLER_STATE_pack(NULL, sampler->state, &sampler_state);

   *pSampler = (VkSampler) sampler;

   return VK_SUCCESS;
}
1550
1551// Descriptor set functions
1552
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001553VkResult anv_CreateDescriptorSetLayout(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001554 VkDevice _device,
1555 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
1556 VkDescriptorSetLayout* pSetLayout)
1557{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001558 ANV_FROM_HANDLE(anv_device, device, _device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001559 struct anv_descriptor_set_layout *set_layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001560
1561 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
1562
Jason Ekstrand8c5e48f2015-07-06 16:43:28 -07001563 uint32_t sampler_count[VK_SHADER_STAGE_NUM] = { 0, };
1564 uint32_t surface_count[VK_SHADER_STAGE_NUM] = { 0, };
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001565 uint32_t num_dynamic_buffers = 0;
1566 uint32_t count = 0;
Jason Ekstrand22513052015-05-30 10:07:29 -07001567 uint32_t stages = 0;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001568 uint32_t s;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001569
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001570 for (uint32_t i = 0; i < pCreateInfo->count; i++) {
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07001571 switch (pCreateInfo->pBinding[i].descriptorType) {
1572 case VK_DESCRIPTOR_TYPE_SAMPLER:
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07001573 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001574 for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
Jason Ekstrand63c11902015-07-06 17:43:58 -07001575 sampler_count[s] += pCreateInfo->pBinding[i].arraySize;
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001576 break;
1577 default:
1578 break;
1579 }
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001580
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001581 switch (pCreateInfo->pBinding[i].descriptorType) {
1582 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07001583 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1584 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1585 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1586 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1587 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1588 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1589 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1590 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001591 for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
Jason Ekstrand63c11902015-07-06 17:43:58 -07001592 surface_count[s] += pCreateInfo->pBinding[i].arraySize;
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07001593 break;
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07001594 default:
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001595 break;
1596 }
1597
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001598 switch (pCreateInfo->pBinding[i].descriptorType) {
1599 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1600 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
Jason Ekstrand63c11902015-07-06 17:43:58 -07001601 num_dynamic_buffers += pCreateInfo->pBinding[i].arraySize;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001602 break;
1603 default:
1604 break;
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07001605 }
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001606
Jason Ekstrand22513052015-05-30 10:07:29 -07001607 stages |= pCreateInfo->pBinding[i].stageFlags;
Jason Ekstrand63c11902015-07-06 17:43:58 -07001608 count += pCreateInfo->pBinding[i].arraySize;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001609 }
1610
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001611 uint32_t sampler_total = 0;
1612 uint32_t surface_total = 0;
Jason Ekstrand8c5e48f2015-07-06 16:43:28 -07001613 for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001614 sampler_total += sampler_count[s];
1615 surface_total += surface_count[s];
1616 }
1617
1618 size_t size = sizeof(*set_layout) +
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001619 (sampler_total + surface_total) * sizeof(set_layout->entries[0]);
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001620 set_layout = anv_device_alloc(device, size, 8,
1621 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1622 if (!set_layout)
1623 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1624
1625 set_layout->num_dynamic_buffers = num_dynamic_buffers;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001626 set_layout->count = count;
Jason Ekstrand22513052015-05-30 10:07:29 -07001627 set_layout->shader_stages = stages;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001628
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001629 struct anv_descriptor_slot *p = set_layout->entries;
Jason Ekstrand8c5e48f2015-07-06 16:43:28 -07001630 struct anv_descriptor_slot *sampler[VK_SHADER_STAGE_NUM];
1631 struct anv_descriptor_slot *surface[VK_SHADER_STAGE_NUM];
1632 for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001633 set_layout->stage[s].surface_count = surface_count[s];
1634 set_layout->stage[s].surface_start = surface[s] = p;
1635 p += surface_count[s];
1636 set_layout->stage[s].sampler_count = sampler_count[s];
1637 set_layout->stage[s].sampler_start = sampler[s] = p;
1638 p += sampler_count[s];
1639 }
1640
1641 uint32_t descriptor = 0;
Kristian Høgsberg Kristensen4aecec02015-05-29 11:32:53 -07001642 int8_t dynamic_slot = 0;
1643 bool is_dynamic;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001644 for (uint32_t i = 0; i < pCreateInfo->count; i++) {
1645 switch (pCreateInfo->pBinding[i].descriptorType) {
1646 case VK_DESCRIPTOR_TYPE_SAMPLER:
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001647 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1648 for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
Jason Ekstrand63c11902015-07-06 17:43:58 -07001649 for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001650 sampler[s]->index = descriptor + j;
Kristian Høgsberg Kristensen4aecec02015-05-29 11:32:53 -07001651 sampler[s]->dynamic_slot = -1;
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001652 sampler[s]++;
1653 }
1654 break;
1655 default:
1656 break;
1657 }
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001658
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001659 switch (pCreateInfo->pBinding[i].descriptorType) {
1660 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1661 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
Kristian Høgsberg Kristensen4aecec02015-05-29 11:32:53 -07001662 is_dynamic = true;
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001663 break;
1664 default:
Kristian Høgsberg Kristensen4aecec02015-05-29 11:32:53 -07001665 is_dynamic = false;
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001666 break;
1667 }
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001668
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001669 switch (pCreateInfo->pBinding[i].descriptorType) {
1670 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001671 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1672 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1673 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1674 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1675 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1676 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1677 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1678 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1679 for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
Jason Ekstrand63c11902015-07-06 17:43:58 -07001680 for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001681 surface[s]->index = descriptor + j;
Kristian Høgsberg Kristensen4aecec02015-05-29 11:32:53 -07001682 if (is_dynamic)
1683 surface[s]->dynamic_slot = dynamic_slot + j;
1684 else
1685 surface[s]->dynamic_slot = -1;
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001686 surface[s]++;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001687 }
1688 break;
1689 default:
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001690 break;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001691 }
Kristian Høgsberg Kristensen4aecec02015-05-29 11:32:53 -07001692
1693 if (is_dynamic)
Jason Ekstrand63c11902015-07-06 17:43:58 -07001694 dynamic_slot += pCreateInfo->pBinding[i].arraySize;
Kristian Høgsberg Kristensen4aecec02015-05-29 11:32:53 -07001695
Jason Ekstrand63c11902015-07-06 17:43:58 -07001696 descriptor += pCreateInfo->pBinding[i].arraySize;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07001697 }
1698
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001699 *pSetLayout = (VkDescriptorSetLayout) set_layout;
1700
1701 return VK_SUCCESS;
1702}
1703
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001704VkResult anv_CreateDescriptorPool(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001705 VkDevice device,
1706 VkDescriptorPoolUsage poolUsage,
1707 uint32_t maxSets,
1708 const VkDescriptorPoolCreateInfo* pCreateInfo,
1709 VkDescriptorPool* pDescriptorPool)
1710{
Kristian Høgsberga9f21152015-05-17 18:38:34 -07001711 *pDescriptorPool = 1;
1712
1713 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001714}
1715
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001716VkResult anv_ResetDescriptorPool(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001717 VkDevice device,
1718 VkDescriptorPool descriptorPool)
1719{
Kristian Høgsberga9f21152015-05-17 18:38:34 -07001720 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001721}
1722
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001723VkResult anv_AllocDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001724 VkDevice _device,
1725 VkDescriptorPool descriptorPool,
1726 VkDescriptorSetUsage setUsage,
1727 uint32_t count,
1728 const VkDescriptorSetLayout* pSetLayouts,
1729 VkDescriptorSet* pDescriptorSets,
1730 uint32_t* pCount)
1731{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001732 ANV_FROM_HANDLE(anv_device, device, _device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001733 struct anv_descriptor_set *set;
1734 size_t size;
1735
1736 for (uint32_t i = 0; i < count; i++) {
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001737 ANV_FROM_HANDLE(anv_descriptor_set_layout, layout, pSetLayouts[i]);
Kristian Høgsberga77229c2015-05-13 11:49:30 -07001738 size = sizeof(*set) + layout->count * sizeof(set->descriptors[0]);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001739 set = anv_device_alloc(device, size, 8,
1740 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1741 if (!set) {
1742 *pCount = i;
1743 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1744 }
1745
Jason Ekstrand0a547512015-05-21 16:33:04 -07001746 /* Descriptor sets may not be 100% filled out so we need to memset to
1747 * ensure that we can properly detect and handle holes.
1748 */
1749 memset(set, 0, size);
1750
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001751 pDescriptorSets[i] = (VkDescriptorSet) set;
1752 }
1753
1754 *pCount = count;
1755
Kristian Høgsbergb4b3bd12015-05-17 18:39:12 -07001756 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001757}
1758
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001759void anv_UpdateDescriptors(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001760 VkDevice _device,
1761 VkDescriptorSet descriptorSet,
1762 uint32_t updateCount,
1763 const void** ppUpdateArray)
1764{
1765 struct anv_descriptor_set *set = (struct anv_descriptor_set *) descriptorSet;
1766 VkUpdateSamplers *update_samplers;
1767 VkUpdateSamplerTextures *update_sampler_textures;
1768 VkUpdateImages *update_images;
1769 VkUpdateBuffers *update_buffers;
1770 VkUpdateAsCopy *update_as_copy;
1771
1772 for (uint32_t i = 0; i < updateCount; i++) {
1773 const struct anv_common *common = ppUpdateArray[i];
1774
1775 switch (common->sType) {
1776 case VK_STRUCTURE_TYPE_UPDATE_SAMPLERS:
1777 update_samplers = (VkUpdateSamplers *) common;
1778
1779 for (uint32_t j = 0; j < update_samplers->count; j++) {
Kristian Høgsberg4f9eaf72015-05-13 14:02:35 -07001780 set->descriptors[update_samplers->binding + j].sampler =
1781 (struct anv_sampler *) update_samplers->pSamplers[j];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001782 }
1783 break;
1784
1785 case VK_STRUCTURE_TYPE_UPDATE_SAMPLER_TEXTURES:
1786 /* FIXME: Shouldn't this be *_UPDATE_SAMPLER_IMAGES? */
1787 update_sampler_textures = (VkUpdateSamplerTextures *) common;
1788
1789 for (uint32_t j = 0; j < update_sampler_textures->count; j++) {
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001790 set->descriptors[update_sampler_textures->binding + j].view =
1791 (struct anv_surface_view *)
Kristian Høgsberg4f9eaf72015-05-13 14:02:35 -07001792 update_sampler_textures->pSamplerImageViews[j].pImageView->view;
1793 set->descriptors[update_sampler_textures->binding + j].sampler =
1794 (struct anv_sampler *)
1795 update_sampler_textures->pSamplerImageViews[j].sampler;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001796 }
1797 break;
1798
1799 case VK_STRUCTURE_TYPE_UPDATE_IMAGES:
1800 update_images = (VkUpdateImages *) common;
1801
1802 for (uint32_t j = 0; j < update_images->count; j++) {
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001803 set->descriptors[update_images->binding + j].view =
1804 (struct anv_surface_view *) update_images->pImageViews[j].view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001805 }
1806 break;
1807
1808 case VK_STRUCTURE_TYPE_UPDATE_BUFFERS:
1809 update_buffers = (VkUpdateBuffers *) common;
1810
1811 for (uint32_t j = 0; j < update_buffers->count; j++) {
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001812 set->descriptors[update_buffers->binding + j].view =
1813 (struct anv_surface_view *) update_buffers->pBufferViews[j].view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001814 }
1815 /* FIXME: descriptor arrays? */
1816 break;
1817
1818 case VK_STRUCTURE_TYPE_UPDATE_AS_COPY:
1819 update_as_copy = (VkUpdateAsCopy *) common;
1820 (void) update_as_copy;
1821 break;
1822
1823 default:
1824 break;
1825 }
1826 }
1827}
1828
1829// State object functions
1830
/**
 * Clamps x into the inclusive range [min, max].
 *
 * Written as guard clauses: reject below-range and above-range values
 * first, then pass x through unchanged.
 */
static inline int64_t
clamp_int64(int64_t x, int64_t min, int64_t max)
{
   if (x < min)
      return min;
   if (x > max)
      return max;
   return x;
}
1841
Jason Ekstrand57153da2015-05-22 15:15:08 -07001842static void
1843anv_dynamic_vp_state_destroy(struct anv_device *device,
1844 struct anv_object *object,
1845 VkObjectType obj_type)
1846{
1847 struct anv_dynamic_vp_state *state = (void *)object;
1848
1849 assert(obj_type == VK_OBJECT_TYPE_DYNAMIC_VP_STATE);
1850
1851 anv_state_pool_free(&device->dynamic_state_pool, state->sf_clip_vp);
1852 anv_state_pool_free(&device->dynamic_state_pool, state->cc_vp);
1853 anv_state_pool_free(&device->dynamic_state_pool, state->scissor);
1854
1855 anv_device_free(device, state);
1856}
1857
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001858VkResult anv_CreateDynamicViewportState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001859 VkDevice _device,
1860 const VkDynamicVpStateCreateInfo* pCreateInfo,
1861 VkDynamicVpState* pState)
1862{
1863 struct anv_device *device = (struct anv_device *) _device;
1864 struct anv_dynamic_vp_state *state;
1865
1866 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO);
1867
1868 state = anv_device_alloc(device, sizeof(*state), 8,
1869 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1870 if (state == NULL)
1871 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1872
Jason Ekstrand57153da2015-05-22 15:15:08 -07001873 state->base.destructor = anv_dynamic_vp_state_destroy;
1874
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001875 unsigned count = pCreateInfo->viewportAndScissorCount;
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001876 state->sf_clip_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001877 count * 64, 64);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001878 state->cc_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001879 count * 8, 32);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001880 state->scissor = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001881 count * 32, 32);
1882
1883 for (uint32_t i = 0; i < pCreateInfo->viewportAndScissorCount; i++) {
1884 const VkViewport *vp = &pCreateInfo->pViewports[i];
Jason Ekstrand1f1b26b2015-07-06 17:47:18 -07001885 const VkRect2D *s = &pCreateInfo->pScissors[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001886
1887 struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport = {
1888 .ViewportMatrixElementm00 = vp->width / 2,
1889 .ViewportMatrixElementm11 = vp->height / 2,
1890 .ViewportMatrixElementm22 = (vp->maxDepth - vp->minDepth) / 2,
1891 .ViewportMatrixElementm30 = vp->originX + vp->width / 2,
1892 .ViewportMatrixElementm31 = vp->originY + vp->height / 2,
1893 .ViewportMatrixElementm32 = (vp->maxDepth + vp->minDepth) / 2,
1894 .XMinClipGuardband = -1.0f,
1895 .XMaxClipGuardband = 1.0f,
1896 .YMinClipGuardband = -1.0f,
1897 .YMaxClipGuardband = 1.0f,
1898 .XMinViewPort = vp->originX,
1899 .XMaxViewPort = vp->originX + vp->width - 1,
1900 .YMinViewPort = vp->originY,
1901 .YMaxViewPort = vp->originY + vp->height - 1,
1902 };
1903
1904 struct GEN8_CC_VIEWPORT cc_viewport = {
1905 .MinimumDepth = vp->minDepth,
1906 .MaximumDepth = vp->maxDepth
1907 };
1908
1909 /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
1910 * ymax < ymin for empty clips. In case clip x, y, width height are all
1911 * 0, the clamps below produce 0 for xmin, ymin, xmax, ymax, which isn't
1912 * what we want. Just special case empty clips and produce a canonical
1913 * empty clip. */
1914 static const struct GEN8_SCISSOR_RECT empty_scissor = {
1915 .ScissorRectangleYMin = 1,
1916 .ScissorRectangleXMin = 1,
1917 .ScissorRectangleYMax = 0,
1918 .ScissorRectangleXMax = 0
1919 };
1920
1921 const int max = 0xffff;
1922 struct GEN8_SCISSOR_RECT scissor = {
1923 /* Do this math using int64_t so overflow gets clamped correctly. */
1924 .ScissorRectangleYMin = clamp_int64(s->offset.y, 0, max),
1925 .ScissorRectangleXMin = clamp_int64(s->offset.x, 0, max),
1926 .ScissorRectangleYMax = clamp_int64((uint64_t) s->offset.y + s->extent.height - 1, 0, max),
1927 .ScissorRectangleXMax = clamp_int64((uint64_t) s->offset.x + s->extent.width - 1, 0, max)
1928 };
1929
1930 GEN8_SF_CLIP_VIEWPORT_pack(NULL, state->sf_clip_vp.map + i * 64, &sf_clip_viewport);
1931 GEN8_CC_VIEWPORT_pack(NULL, state->cc_vp.map + i * 32, &cc_viewport);
1932
1933 if (s->extent.width <= 0 || s->extent.height <= 0) {
1934 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &empty_scissor);
1935 } else {
1936 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &scissor);
1937 }
1938 }
1939
1940 *pState = (VkDynamicVpState) state;
1941
1942 return VK_SUCCESS;
1943}
1944
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001945VkResult anv_CreateDynamicRasterState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001946 VkDevice _device,
1947 const VkDynamicRsStateCreateInfo* pCreateInfo,
1948 VkDynamicRsState* pState)
1949{
1950 struct anv_device *device = (struct anv_device *) _device;
1951 struct anv_dynamic_rs_state *state;
1952
1953 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_RS_STATE_CREATE_INFO);
1954
1955 state = anv_device_alloc(device, sizeof(*state), 8,
1956 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1957 if (state == NULL)
1958 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1959
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001960 struct GEN8_3DSTATE_SF sf = {
1961 GEN8_3DSTATE_SF_header,
1962 .LineWidth = pCreateInfo->lineWidth,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001963 };
1964
1965 GEN8_3DSTATE_SF_pack(NULL, state->state_sf, &sf);
1966
Kristian Høgsberg99883772015-05-26 09:40:10 -07001967 bool enable_bias = pCreateInfo->depthBias != 0.0f ||
1968 pCreateInfo->slopeScaledDepthBias != 0.0f;
1969 struct GEN8_3DSTATE_RASTER raster = {
1970 .GlobalDepthOffsetEnableSolid = enable_bias,
1971 .GlobalDepthOffsetEnableWireframe = enable_bias,
1972 .GlobalDepthOffsetEnablePoint = enable_bias,
1973 .GlobalDepthOffsetConstant = pCreateInfo->depthBias,
1974 .GlobalDepthOffsetScale = pCreateInfo->slopeScaledDepthBias,
1975 .GlobalDepthOffsetClamp = pCreateInfo->depthBiasClamp
1976 };
1977
1978 GEN8_3DSTATE_RASTER_pack(NULL, state->state_raster, &raster);
1979
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001980 *pState = (VkDynamicRsState) state;
1981
1982 return VK_SUCCESS;
1983}
1984
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001985VkResult anv_CreateDynamicColorBlendState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001986 VkDevice _device,
1987 const VkDynamicCbStateCreateInfo* pCreateInfo,
1988 VkDynamicCbState* pState)
1989{
1990 struct anv_device *device = (struct anv_device *) _device;
1991 struct anv_dynamic_cb_state *state;
1992
1993 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_CB_STATE_CREATE_INFO);
1994
1995 state = anv_device_alloc(device, sizeof(*state), 8,
1996 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1997 if (state == NULL)
1998 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1999
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002000 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2001 .BlendConstantColorRed = pCreateInfo->blendConst[0],
2002 .BlendConstantColorGreen = pCreateInfo->blendConst[1],
2003 .BlendConstantColorBlue = pCreateInfo->blendConst[2],
2004 .BlendConstantColorAlpha = pCreateInfo->blendConst[3]
2005 };
2006
2007 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2008
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002009 *pState = (VkDynamicCbState) state;
2010
2011 return VK_SUCCESS;
2012}
2013
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002014VkResult anv_CreateDynamicDepthStencilState(
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002015 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002016 const VkDynamicDsStateCreateInfo* pCreateInfo,
2017 VkDynamicDsState* pState)
2018{
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002019 struct anv_device *device = (struct anv_device *) _device;
2020 struct anv_dynamic_ds_state *state;
2021
2022 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_DS_STATE_CREATE_INFO);
2023
2024 state = anv_device_alloc(device, sizeof(*state), 8,
2025 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2026 if (state == NULL)
2027 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2028
2029 struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
2030 GEN8_3DSTATE_WM_DEPTH_STENCIL_header,
2031
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002032 /* Is this what we need to do? */
2033 .StencilBufferWriteEnable = pCreateInfo->stencilWriteMask != 0,
2034
Jason Ekstrand251aea82015-06-03 16:59:13 -07002035 .StencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2036 .StencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002037
Jason Ekstrand251aea82015-06-03 16:59:13 -07002038 .BackfaceStencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2039 .BackfaceStencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002040 };
2041
2042 GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL, state->state_wm_depth_stencil,
2043 &wm_depth_stencil);
2044
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002045 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2046 .StencilReferenceValue = pCreateInfo->stencilFrontRef,
2047 .BackFaceStencilReferenceValue = pCreateInfo->stencilBackRef
2048 };
2049
2050 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2051
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002052 *pState = (VkDynamicDsState) state;
2053
2054 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002055}
2056
2057// Command buffer functions
2058
Jason Ekstrand57153da2015-05-22 15:15:08 -07002059static void
2060anv_cmd_buffer_destroy(struct anv_device *device,
2061 struct anv_object *object,
2062 VkObjectType obj_type)
2063{
2064 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) object;
2065
2066 assert(obj_type == VK_OBJECT_TYPE_COMMAND_BUFFER);
2067
Jason Ekstrand2dc0f7f2015-05-28 13:08:21 -07002068 /* Destroy all of the batch buffers */
2069 struct anv_batch_bo *bbo = cmd_buffer->last_batch_bo;
Jason Ekstrand999b56c2015-06-09 11:40:22 -07002070 while (bbo) {
Jason Ekstrand2dc0f7f2015-05-28 13:08:21 -07002071 struct anv_batch_bo *prev = bbo->prev_batch_bo;
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002072 anv_batch_bo_destroy(bbo, device);
Jason Ekstrand2dc0f7f2015-05-28 13:08:21 -07002073 bbo = prev;
2074 }
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002075 anv_reloc_list_finish(&cmd_buffer->batch.relocs, device);
Jason Ekstrand2dc0f7f2015-05-28 13:08:21 -07002076
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002077 /* Destroy all of the surface state buffers */
2078 bbo = cmd_buffer->surface_batch_bo;
Jason Ekstrand999b56c2015-06-09 11:40:22 -07002079 while (bbo) {
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002080 struct anv_batch_bo *prev = bbo->prev_batch_bo;
2081 anv_batch_bo_destroy(bbo, device);
2082 bbo = prev;
2083 }
Jason Ekstrand403266b2015-05-25 17:38:15 -07002084 anv_reloc_list_finish(&cmd_buffer->surface_relocs, device);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002085
Jason Ekstrand57153da2015-05-22 15:15:08 -07002086 anv_state_stream_finish(&cmd_buffer->surface_state_stream);
2087 anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);
Jason Ekstrand57153da2015-05-22 15:15:08 -07002088 anv_device_free(device, cmd_buffer->exec2_objects);
2089 anv_device_free(device, cmd_buffer->exec2_bos);
2090 anv_device_free(device, cmd_buffer);
2091}
2092
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002093static VkResult
2094anv_cmd_buffer_chain_batch(struct anv_batch *batch, void *_data)
2095{
2096 struct anv_cmd_buffer *cmd_buffer = _data;
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002097
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002098 struct anv_batch_bo *new_bbo, *old_bbo = cmd_buffer->last_batch_bo;
2099
2100 VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
2101 if (result != VK_SUCCESS)
2102 return result;
2103
Jason Ekstrand468c89a2015-05-28 15:25:02 -07002104 /* We set the end of the batch a little short so we would be sure we
2105 * have room for the chaining command. Since we're about to emit the
2106 * chaining command, let's set it back where it should go.
2107 */
2108 batch->end += GEN8_MI_BATCH_BUFFER_START_length * 4;
2109 assert(batch->end == old_bbo->bo.map + old_bbo->bo.size);
2110
2111 anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_START,
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002112 GEN8_MI_BATCH_BUFFER_START_header,
2113 ._2ndLevelBatchBuffer = _1stlevelbatch,
2114 .AddressSpaceIndicator = ASI_PPGTT,
2115 .BatchBufferStartAddress = { &new_bbo->bo, 0 },
Jason Ekstrand468c89a2015-05-28 15:25:02 -07002116 );
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002117
2118 /* Pad out to a 2-dword aligned boundary with zeros */
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002119 if ((uintptr_t)batch->next % 8 != 0) {
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002120 *(uint32_t *)batch->next = 0;
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002121 batch->next += 4;
2122 }
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002123
2124 anv_batch_bo_finish(cmd_buffer->last_batch_bo, batch);
2125
2126 new_bbo->prev_batch_bo = old_bbo;
2127 cmd_buffer->last_batch_bo = new_bbo;
2128
2129 anv_batch_bo_start(new_bbo, batch, GEN8_MI_BATCH_BUFFER_START_length * 4);
2130
2131 return VK_SUCCESS;
2132}
2133
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002134VkResult anv_CreateCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002135 VkDevice _device,
2136 const VkCmdBufferCreateInfo* pCreateInfo,
2137 VkCmdBuffer* pCmdBuffer)
2138{
2139 struct anv_device *device = (struct anv_device *) _device;
2140 struct anv_cmd_buffer *cmd_buffer;
2141 VkResult result;
2142
Jason Ekstrande19d6be2015-07-08 10:53:32 -07002143 assert(pCreateInfo->level == VK_CMD_BUFFER_LEVEL_PRIMARY);
2144
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002145 cmd_buffer = anv_device_alloc(device, sizeof(*cmd_buffer), 8,
2146 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2147 if (cmd_buffer == NULL)
2148 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2149
Jason Ekstrand57153da2015-05-22 15:15:08 -07002150 cmd_buffer->base.destructor = anv_cmd_buffer_destroy;
2151
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002152 cmd_buffer->device = device;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002153 cmd_buffer->rs_state = NULL;
2154 cmd_buffer->vp_state = NULL;
Kristian Høgsberg Kristensen5744d172015-06-02 22:51:42 -07002155 cmd_buffer->cb_state = NULL;
Jason Ekstrand5d4b6a02015-06-09 16:27:55 -07002156 cmd_buffer->ds_state = NULL;
Jason Ekstrand7fbed522015-07-07 15:11:56 -07002157 memset(&cmd_buffer->state_vf, 0, sizeof(cmd_buffer->state_vf));
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002158 memset(&cmd_buffer->descriptors, 0, sizeof(cmd_buffer->descriptors));
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002159
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002160 result = anv_batch_bo_create(device, &cmd_buffer->last_batch_bo);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002161 if (result != VK_SUCCESS)
2162 goto fail;
2163
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002164 result = anv_reloc_list_init(&cmd_buffer->batch.relocs, device);
2165 if (result != VK_SUCCESS)
2166 goto fail_batch_bo;
2167
2168 cmd_buffer->batch.device = device;
2169 cmd_buffer->batch.extend_cb = anv_cmd_buffer_chain_batch;
2170 cmd_buffer->batch.user_data = cmd_buffer;
2171
2172 anv_batch_bo_start(cmd_buffer->last_batch_bo, &cmd_buffer->batch,
2173 GEN8_MI_BATCH_BUFFER_START_length * 4);
2174
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002175 result = anv_batch_bo_create(device, &cmd_buffer->surface_batch_bo);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002176 if (result != VK_SUCCESS)
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002177 goto fail_batch_relocs;
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002178 cmd_buffer->surface_batch_bo->first_reloc = 0;
2179
2180 result = anv_reloc_list_init(&cmd_buffer->surface_relocs, device);
2181 if (result != VK_SUCCESS)
2182 goto fail_ss_batch_bo;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002183
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002184 /* Start surface_next at 1 so surface offset 0 is invalid. */
2185 cmd_buffer->surface_next = 1;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002186
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002187 cmd_buffer->exec2_objects = NULL;
2188 cmd_buffer->exec2_bos = NULL;
2189 cmd_buffer->exec2_array_length = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002190
2191 anv_state_stream_init(&cmd_buffer->surface_state_stream,
2192 &device->surface_state_block_pool);
Kristian Høgsberga1ec7892015-05-13 13:51:08 -07002193 anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002194 &device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002195
2196 cmd_buffer->dirty = 0;
2197 cmd_buffer->vb_dirty = 0;
Jason Ekstrand22513052015-05-30 10:07:29 -07002198 cmd_buffer->descriptors_dirty = 0;
Jason Ekstrandae8c93e2015-05-25 17:08:11 -07002199 cmd_buffer->pipeline = NULL;
Kristian Høgsberg Kristensen5a317ef2015-05-27 21:45:23 -07002200 cmd_buffer->vp_state = NULL;
2201 cmd_buffer->rs_state = NULL;
2202 cmd_buffer->ds_state = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002203
2204 *pCmdBuffer = (VkCmdBuffer) cmd_buffer;
2205
2206 return VK_SUCCESS;
2207
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002208 fail_ss_batch_bo:
2209 anv_batch_bo_destroy(cmd_buffer->surface_batch_bo, device);
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002210 fail_batch_relocs:
2211 anv_reloc_list_finish(&cmd_buffer->batch.relocs, device);
2212 fail_batch_bo:
2213 anv_batch_bo_destroy(cmd_buffer->last_batch_bo, device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002214 fail:
2215 anv_device_free(device, cmd_buffer);
2216
2217 return result;
2218}
2219
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002220static void
2221anv_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002222{
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002223 struct anv_device *device = cmd_buffer->device;
Kristian Høgsberg Kristensen9b9f9732015-06-19 15:41:30 -07002224 struct anv_bo *scratch_bo = NULL;
2225
2226 cmd_buffer->scratch_size = device->scratch_block_pool.size;
2227 if (cmd_buffer->scratch_size > 0)
2228 scratch_bo = &device->scratch_block_pool.bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002229
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002230 anv_batch_emit(&cmd_buffer->batch, GEN8_STATE_BASE_ADDRESS,
Kristian Høgsberg Kristensen9b9f9732015-06-19 15:41:30 -07002231 .GeneralStateBaseAddress = { scratch_bo, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002232 .GeneralStateMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002233 .GeneralStateBaseAddressModifyEnable = true,
2234 .GeneralStateBufferSize = 0xfffff,
2235 .GeneralStateBufferSizeModifyEnable = true,
2236
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002237 .SurfaceStateBaseAddress = { &cmd_buffer->surface_batch_bo->bo, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002238 .SurfaceStateMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002239 .SurfaceStateBaseAddressModifyEnable = true,
2240
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002241 .DynamicStateBaseAddress = { &device->dynamic_state_block_pool.bo, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002242 .DynamicStateMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002243 .DynamicStateBaseAddressModifyEnable = true,
2244 .DynamicStateBufferSize = 0xfffff,
2245 .DynamicStateBufferSizeModifyEnable = true,
2246
2247 .IndirectObjectBaseAddress = { NULL, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002248 .IndirectObjectMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002249 .IndirectObjectBaseAddressModifyEnable = true,
2250 .IndirectObjectBufferSize = 0xfffff,
2251 .IndirectObjectBufferSizeModifyEnable = true,
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002252
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002253 .InstructionBaseAddress = { &device->instruction_block_pool.bo, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002254 .InstructionMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002255 .InstructionBaseAddressModifyEnable = true,
2256 .InstructionBufferSize = 0xfffff,
2257 .InstructionBuffersizeModifyEnable = true);
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002258}
2259
2260VkResult anv_BeginCommandBuffer(
2261 VkCmdBuffer cmdBuffer,
2262 const VkCmdBufferBeginInfo* pBeginInfo)
2263{
2264 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2265
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002266 anv_cmd_buffer_emit_state_base_address(cmd_buffer);
Kristian Høgsberg Kristensen7637b022015-06-11 15:21:49 -07002267 cmd_buffer->current_pipeline = UINT32_MAX;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002268
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002269 return VK_SUCCESS;
2270}
2271
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002272static VkResult
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002273anv_cmd_buffer_add_bo(struct anv_cmd_buffer *cmd_buffer,
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002274 struct anv_bo *bo,
2275 struct drm_i915_gem_relocation_entry *relocs,
2276 size_t num_relocs)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002277{
2278 struct drm_i915_gem_exec_object2 *obj;
2279
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002280 if (bo->index < cmd_buffer->bo_count &&
2281 cmd_buffer->exec2_bos[bo->index] == bo)
2282 return VK_SUCCESS;
2283
2284 if (cmd_buffer->bo_count >= cmd_buffer->exec2_array_length) {
2285 uint32_t new_len = cmd_buffer->exec2_objects ?
2286 cmd_buffer->exec2_array_length * 2 : 64;
2287
2288 struct drm_i915_gem_exec_object2 *new_objects =
2289 anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_objects),
2290 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
2291 if (new_objects == NULL)
2292 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2293
2294 struct anv_bo **new_bos =
2295 anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_bos),
2296 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
2297 if (new_objects == NULL) {
2298 anv_device_free(cmd_buffer->device, new_objects);
2299 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2300 }
2301
2302 if (cmd_buffer->exec2_objects) {
2303 memcpy(new_objects, cmd_buffer->exec2_objects,
2304 cmd_buffer->bo_count * sizeof(*new_objects));
2305 memcpy(new_bos, cmd_buffer->exec2_bos,
2306 cmd_buffer->bo_count * sizeof(*new_bos));
2307 }
2308
2309 cmd_buffer->exec2_objects = new_objects;
2310 cmd_buffer->exec2_bos = new_bos;
2311 cmd_buffer->exec2_array_length = new_len;
2312 }
2313
2314 assert(cmd_buffer->bo_count < cmd_buffer->exec2_array_length);
2315
2316 bo->index = cmd_buffer->bo_count++;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002317 obj = &cmd_buffer->exec2_objects[bo->index];
2318 cmd_buffer->exec2_bos[bo->index] = bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002319
2320 obj->handle = bo->gem_handle;
2321 obj->relocation_count = 0;
2322 obj->relocs_ptr = 0;
2323 obj->alignment = 0;
2324 obj->offset = bo->offset;
2325 obj->flags = 0;
2326 obj->rsvd1 = 0;
2327 obj->rsvd2 = 0;
2328
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002329 if (relocs) {
2330 obj->relocation_count = num_relocs;
2331 obj->relocs_ptr = (uintptr_t) relocs;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002332 }
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002333
2334 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002335}
2336
2337static void
2338anv_cmd_buffer_add_validate_bos(struct anv_cmd_buffer *cmd_buffer,
2339 struct anv_reloc_list *list)
2340{
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002341 for (size_t i = 0; i < list->num_relocs; i++)
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002342 anv_cmd_buffer_add_bo(cmd_buffer, list->reloc_bos[i], NULL, 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002343}
2344
2345static void
2346anv_cmd_buffer_process_relocs(struct anv_cmd_buffer *cmd_buffer,
2347 struct anv_reloc_list *list)
2348{
2349 struct anv_bo *bo;
2350
2351 /* If the kernel supports I915_EXEC_NO_RELOC, it will compare offset in
2352 * struct drm_i915_gem_exec_object2 against the bos current offset and if
2353 * all bos haven't moved it will skip relocation processing alltogether.
2354 * If I915_EXEC_NO_RELOC is not supported, the kernel ignores the incoming
2355 * value of offset so we can set it either way. For that to work we need
2356 * to make sure all relocs use the same presumed offset.
2357 */
2358
2359 for (size_t i = 0; i < list->num_relocs; i++) {
2360 bo = list->reloc_bos[i];
2361 if (bo->offset != list->relocs[i].presumed_offset)
2362 cmd_buffer->need_reloc = true;
2363
2364 list->relocs[i].target_handle = bo->index;
2365 }
2366}
2367
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002368VkResult anv_EndCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002369 VkCmdBuffer cmdBuffer)
2370{
2371 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2372 struct anv_device *device = cmd_buffer->device;
2373 struct anv_batch *batch = &cmd_buffer->batch;
2374
2375 anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_END);
2376
2377 /* Round batch up to an even number of dwords. */
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002378 if ((batch->next - batch->start) & 4)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002379 anv_batch_emit(batch, GEN8_MI_NOOP);
2380
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002381 anv_batch_bo_finish(cmd_buffer->last_batch_bo, &cmd_buffer->batch);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002382 cmd_buffer->surface_batch_bo->num_relocs =
2383 cmd_buffer->surface_relocs.num_relocs - cmd_buffer->surface_batch_bo->first_reloc;
2384 cmd_buffer->surface_batch_bo->length = cmd_buffer->surface_next;
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002385
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002386 cmd_buffer->bo_count = 0;
2387 cmd_buffer->need_reloc = false;
2388
2389 /* Lock for access to bo->index. */
2390 pthread_mutex_lock(&device->mutex);
2391
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002392 /* Add surface state bos first so we can add them with their relocs. */
2393 for (struct anv_batch_bo *bbo = cmd_buffer->surface_batch_bo;
2394 bbo != NULL; bbo = bbo->prev_batch_bo) {
2395 anv_cmd_buffer_add_bo(cmd_buffer, &bbo->bo,
2396 &cmd_buffer->surface_relocs.relocs[bbo->first_reloc],
2397 bbo->num_relocs);
2398 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002399
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002400 /* Add all of the BOs referenced by surface state */
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002401 anv_cmd_buffer_add_validate_bos(cmd_buffer, &cmd_buffer->surface_relocs);
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002402
2403 /* Add all but the first batch BO */
2404 struct anv_batch_bo *batch_bo = cmd_buffer->last_batch_bo;
2405 while (batch_bo->prev_batch_bo) {
2406 anv_cmd_buffer_add_bo(cmd_buffer, &batch_bo->bo,
2407 &batch->relocs.relocs[batch_bo->first_reloc],
2408 batch_bo->num_relocs);
2409 batch_bo = batch_bo->prev_batch_bo;
2410 }
2411
2412 /* Add everything referenced by the batches */
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002413 anv_cmd_buffer_add_validate_bos(cmd_buffer, &batch->relocs);
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002414
2415 /* Add the first batch bo last */
2416 assert(batch_bo->prev_batch_bo == NULL && batch_bo->first_reloc == 0);
2417 anv_cmd_buffer_add_bo(cmd_buffer, &batch_bo->bo,
2418 &batch->relocs.relocs[batch_bo->first_reloc],
2419 batch_bo->num_relocs);
2420 assert(batch_bo->bo.index == cmd_buffer->bo_count - 1);
2421
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002422 anv_cmd_buffer_process_relocs(cmd_buffer, &cmd_buffer->surface_relocs);
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002423 anv_cmd_buffer_process_relocs(cmd_buffer, &batch->relocs);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002424
2425 cmd_buffer->execbuf.buffers_ptr = (uintptr_t) cmd_buffer->exec2_objects;
2426 cmd_buffer->execbuf.buffer_count = cmd_buffer->bo_count;
2427 cmd_buffer->execbuf.batch_start_offset = 0;
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002428 cmd_buffer->execbuf.batch_len = batch->next - batch->start;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002429 cmd_buffer->execbuf.cliprects_ptr = 0;
2430 cmd_buffer->execbuf.num_cliprects = 0;
2431 cmd_buffer->execbuf.DR1 = 0;
2432 cmd_buffer->execbuf.DR4 = 0;
2433
2434 cmd_buffer->execbuf.flags = I915_EXEC_HANDLE_LUT;
2435 if (!cmd_buffer->need_reloc)
2436 cmd_buffer->execbuf.flags |= I915_EXEC_NO_RELOC;
2437 cmd_buffer->execbuf.flags |= I915_EXEC_RENDER;
2438 cmd_buffer->execbuf.rsvd1 = device->context_id;
2439 cmd_buffer->execbuf.rsvd2 = 0;
2440
2441 pthread_mutex_unlock(&device->mutex);
2442
2443 return VK_SUCCESS;
2444}
2445
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002446VkResult anv_ResetCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002447 VkCmdBuffer cmdBuffer)
2448{
2449 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2450
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002451 /* Delete all but the first batch bo */
2452 while (cmd_buffer->last_batch_bo->prev_batch_bo) {
2453 struct anv_batch_bo *prev = cmd_buffer->last_batch_bo->prev_batch_bo;
2454 anv_batch_bo_destroy(cmd_buffer->last_batch_bo, cmd_buffer->device);
2455 cmd_buffer->last_batch_bo = prev;
2456 }
2457 assert(cmd_buffer->last_batch_bo->prev_batch_bo == NULL);
2458
2459 cmd_buffer->batch.relocs.num_relocs = 0;
2460 anv_batch_bo_start(cmd_buffer->last_batch_bo, &cmd_buffer->batch,
2461 GEN8_MI_BATCH_BUFFER_START_length * 4);
2462
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002463 /* Delete all but the first batch bo */
2464 while (cmd_buffer->surface_batch_bo->prev_batch_bo) {
2465 struct anv_batch_bo *prev = cmd_buffer->surface_batch_bo->prev_batch_bo;
2466 anv_batch_bo_destroy(cmd_buffer->surface_batch_bo, cmd_buffer->device);
2467 cmd_buffer->surface_batch_bo = prev;
2468 }
2469 assert(cmd_buffer->surface_batch_bo->prev_batch_bo == NULL);
2470
2471 cmd_buffer->surface_next = 1;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002472 cmd_buffer->surface_relocs.num_relocs = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002473
Jason Ekstrand5d4b6a02015-06-09 16:27:55 -07002474 cmd_buffer->rs_state = NULL;
2475 cmd_buffer->vp_state = NULL;
2476 cmd_buffer->cb_state = NULL;
2477 cmd_buffer->ds_state = NULL;
2478
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002479 return VK_SUCCESS;
2480}
2481
2482// Command buffer building functions
2483
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002484void anv_CmdBindPipeline(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002485 VkCmdBuffer cmdBuffer,
2486 VkPipelineBindPoint pipelineBindPoint,
2487 VkPipeline _pipeline)
2488{
2489 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002490 struct anv_pipeline *pipeline = (struct anv_pipeline *) _pipeline;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002491
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002492 switch (pipelineBindPoint) {
2493 case VK_PIPELINE_BIND_POINT_COMPUTE:
2494 cmd_buffer->compute_pipeline = pipeline;
2495 cmd_buffer->compute_dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
2496 break;
2497
2498 case VK_PIPELINE_BIND_POINT_GRAPHICS:
2499 cmd_buffer->pipeline = pipeline;
2500 cmd_buffer->vb_dirty |= pipeline->vb_used;
2501 cmd_buffer->dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
2502 break;
2503
2504 default:
2505 assert(!"invalid bind point");
2506 break;
2507 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002508}
2509
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002510void anv_CmdBindDynamicStateObject(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002511 VkCmdBuffer cmdBuffer,
2512 VkStateBindPoint stateBindPoint,
2513 VkDynamicStateObject dynamicState)
2514{
2515 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002516
2517 switch (stateBindPoint) {
2518 case VK_STATE_BIND_POINT_VIEWPORT:
Kristian Høgsberg Kristensene7edde62015-06-11 15:04:09 -07002519 cmd_buffer->vp_state = (struct anv_dynamic_vp_state *) dynamicState;
2520 cmd_buffer->dirty |= ANV_CMD_BUFFER_VP_DIRTY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002521 break;
2522 case VK_STATE_BIND_POINT_RASTER:
2523 cmd_buffer->rs_state = (struct anv_dynamic_rs_state *) dynamicState;
2524 cmd_buffer->dirty |= ANV_CMD_BUFFER_RS_DIRTY;
2525 break;
2526 case VK_STATE_BIND_POINT_COLOR_BLEND:
Kristian Høgsberga1d30f82015-05-26 17:12:18 -07002527 cmd_buffer->cb_state = (struct anv_dynamic_cb_state *) dynamicState;
2528 cmd_buffer->dirty |= ANV_CMD_BUFFER_CB_DIRTY;
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002529 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002530 case VK_STATE_BIND_POINT_DEPTH_STENCIL:
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002531 cmd_buffer->ds_state = (struct anv_dynamic_ds_state *) dynamicState;
2532 cmd_buffer->dirty |= ANV_CMD_BUFFER_DS_DIRTY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002533 break;
2534 default:
2535 break;
2536 };
2537}
2538
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002539static struct anv_state
2540anv_cmd_buffer_alloc_surface_state(struct anv_cmd_buffer *cmd_buffer,
2541 uint32_t size, uint32_t alignment)
2542{
2543 struct anv_state state;
2544
Chad Versace55752fe2015-06-26 15:07:59 -07002545 state.offset = align_u32(cmd_buffer->surface_next, alignment);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002546 if (state.offset + size > cmd_buffer->surface_batch_bo->bo.size)
2547 return (struct anv_state) { 0 };
2548
2549 state.map = cmd_buffer->surface_batch_bo->bo.map + state.offset;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002550 state.alloc_size = size;
2551 cmd_buffer->surface_next = state.offset + size;
2552
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002553 assert(state.offset + size <= cmd_buffer->surface_batch_bo->bo.size);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002554
2555 return state;
2556}
2557
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002558static VkResult
2559anv_cmd_buffer_new_surface_state_bo(struct anv_cmd_buffer *cmd_buffer)
2560{
2561 struct anv_batch_bo *new_bbo, *old_bbo = cmd_buffer->surface_batch_bo;
2562
2563 /* Finish off the old buffer */
2564 old_bbo->num_relocs =
2565 cmd_buffer->surface_relocs.num_relocs - old_bbo->first_reloc;
2566 old_bbo->length = cmd_buffer->surface_next;
2567
2568 VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
2569 if (result != VK_SUCCESS)
2570 return result;
2571
2572 new_bbo->first_reloc = cmd_buffer->surface_relocs.num_relocs;
2573 cmd_buffer->surface_next = 1;
2574
2575 new_bbo->prev_batch_bo = old_bbo;
2576 cmd_buffer->surface_batch_bo = new_bbo;
2577
2578 /* Re-emit state base addresses so we get the new surface state base
2579 * address before we start emitting binding tables etc.
2580 */
2581 anv_cmd_buffer_emit_state_base_address(cmd_buffer);
2582
Jason Ekstrande497ac22015-05-30 18:04:48 -07002583 /* It seems like just changing the state base addresses isn't enough.
2584 * Invalidating the cache seems to be enough to cause things to
2585 * propagate. However, I'm not 100% sure what we're supposed to do.
Jason Ekstrand33cccbb2015-05-30 08:02:52 -07002586 */
2587 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
2588 .TextureCacheInvalidationEnable = true);
2589
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002590 return VK_SUCCESS;
2591}
2592
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002593void anv_CmdBindDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002594 VkCmdBuffer cmdBuffer,
2595 VkPipelineBindPoint pipelineBindPoint,
Jason Ekstrand435b0622015-07-07 17:06:10 -07002596 VkPipelineLayout _layout,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002597 uint32_t firstSet,
2598 uint32_t setCount,
2599 const VkDescriptorSet* pDescriptorSets,
2600 uint32_t dynamicOffsetCount,
2601 const uint32_t* pDynamicOffsets)
2602{
2603 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrand435b0622015-07-07 17:06:10 -07002604 struct anv_pipeline_layout *layout = (struct anv_pipeline_layout *) _layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002605 struct anv_descriptor_set *set;
2606 struct anv_descriptor_set_layout *set_layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002607
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002608 assert(firstSet + setCount < MAX_SETS);
2609
2610 uint32_t dynamic_slot = 0;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002611 for (uint32_t i = 0; i < setCount; i++) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002612 set = (struct anv_descriptor_set *) pDescriptorSets[i];
2613 set_layout = layout->set[firstSet + i].layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002614
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002615 cmd_buffer->descriptors[firstSet + i].set = set;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002616
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002617 assert(set_layout->num_dynamic_buffers <
2618 ARRAY_SIZE(cmd_buffer->descriptors[0].dynamic_offsets));
2619 memcpy(cmd_buffer->descriptors[firstSet + i].dynamic_offsets,
2620 pDynamicOffsets + dynamic_slot,
2621 set_layout->num_dynamic_buffers * sizeof(*pDynamicOffsets));
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002622
Jason Ekstrand22513052015-05-30 10:07:29 -07002623 cmd_buffer->descriptors_dirty |= set_layout->shader_stages;
2624
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002625 dynamic_slot += set_layout->num_dynamic_buffers;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002626 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002627}
2628
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002629void anv_CmdBindIndexBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002630 VkCmdBuffer cmdBuffer,
2631 VkBuffer _buffer,
2632 VkDeviceSize offset,
2633 VkIndexType indexType)
2634{
2635 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2636 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
2637
2638 static const uint32_t vk_to_gen_index_type[] = {
Kristian Høgsberg Kristensen5caa4082015-05-31 22:35:11 -07002639 [VK_INDEX_TYPE_UINT16] = INDEX_WORD,
2640 [VK_INDEX_TYPE_UINT32] = INDEX_DWORD,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002641 };
2642
Jason Ekstrand7fbed522015-07-07 15:11:56 -07002643 struct GEN8_3DSTATE_VF vf = {
2644 GEN8_3DSTATE_VF_header,
2645 .CutIndex = (indexType == VK_INDEX_TYPE_UINT16) ? UINT16_MAX : UINT32_MAX,
2646 };
2647 GEN8_3DSTATE_VF_pack(NULL, cmd_buffer->state_vf, &vf);
2648
2649 cmd_buffer->dirty |= ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY;
2650
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002651 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_INDEX_BUFFER,
2652 .IndexFormat = vk_to_gen_index_type[indexType],
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002653 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002654 .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002655 .BufferSize = buffer->size - offset);
2656}
2657
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002658void anv_CmdBindVertexBuffers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002659 VkCmdBuffer cmdBuffer,
2660 uint32_t startBinding,
2661 uint32_t bindingCount,
2662 const VkBuffer* pBuffers,
2663 const VkDeviceSize* pOffsets)
2664{
2665 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002666 struct anv_vertex_binding *vb = cmd_buffer->vertex_bindings;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002667
2668 /* We have to defer setting up vertex buffer since we need the buffer
2669 * stride from the pipeline. */
2670
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002671 assert(startBinding + bindingCount < MAX_VBS);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002672 for (uint32_t i = 0; i < bindingCount; i++) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002673 vb[startBinding + i].buffer = (struct anv_buffer *) pBuffers[i];
2674 vb[startBinding + i].offset = pOffsets[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002675 cmd_buffer->vb_dirty |= 1 << (startBinding + i);
2676 }
2677}
2678
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002679static VkResult
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002680cmd_buffer_emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002681 unsigned stage, struct anv_state *bt_state)
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002682{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002683 struct anv_pipeline_layout *layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002684 uint32_t color_attachments, bias, size;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002685
2686 if (stage == VK_SHADER_STAGE_COMPUTE)
2687 layout = cmd_buffer->compute_pipeline->layout;
2688 else
2689 layout = cmd_buffer->pipeline->layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002690
2691 if (stage == VK_SHADER_STAGE_FRAGMENT) {
2692 bias = MAX_RTS;
2693 color_attachments = cmd_buffer->framebuffer->color_attachment_count;
2694 } else {
2695 bias = 0;
2696 color_attachments = 0;
2697 }
2698
2699 /* This is a little awkward: layout can be NULL but we still have to
2700 * allocate and set a binding table for the PS stage for render
2701 * targets. */
2702 uint32_t surface_count = layout ? layout->stage[stage].surface_count : 0;
2703
2704 if (color_attachments + surface_count == 0)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002705 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002706
2707 size = (bias + surface_count) * sizeof(uint32_t);
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002708 *bt_state = anv_cmd_buffer_alloc_surface_state(cmd_buffer, size, 32);
2709 uint32_t *bt_map = bt_state->map;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002710
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002711 if (bt_state->map == NULL)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002712 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2713
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002714 for (uint32_t ca = 0; ca < color_attachments; ca++) {
2715 const struct anv_surface_view *view =
2716 cmd_buffer->framebuffer->color_attachments[ca];
2717
2718 struct anv_state state =
2719 anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);
2720
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002721 if (state.map == NULL)
2722 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2723
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002724 memcpy(state.map, view->surface_state.map, 64);
2725
2726 /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
2727 *(uint64_t *)(state.map + 8 * 4) =
2728 anv_reloc_list_add(&cmd_buffer->surface_relocs,
2729 cmd_buffer->device,
2730 state.offset + 8 * 4,
2731 view->bo, view->offset);
2732
2733 bt_map[ca] = state.offset;
2734 }
2735
2736 if (layout == NULL)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002737 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002738
2739 for (uint32_t set = 0; set < layout->num_sets; set++) {
2740 struct anv_descriptor_set_binding *d = &cmd_buffer->descriptors[set];
2741 struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;
2742 struct anv_descriptor_slot *surface_slots =
2743 set_layout->stage[stage].surface_start;
2744
2745 uint32_t start = bias + layout->set[set].surface_start[stage];
2746
2747 for (uint32_t b = 0; b < set_layout->stage[stage].surface_count; b++) {
2748 struct anv_surface_view *view =
2749 d->set->descriptors[surface_slots[b].index].view;
2750
Jason Ekstrand03ffa9c2015-05-29 20:43:10 -07002751 if (!view)
2752 continue;
2753
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002754 struct anv_state state =
2755 anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);
2756
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002757 if (state.map == NULL)
2758 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2759
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002760 uint32_t offset;
2761 if (surface_slots[b].dynamic_slot >= 0) {
2762 uint32_t dynamic_offset =
2763 d->dynamic_offsets[surface_slots[b].dynamic_slot];
2764
2765 offset = view->offset + dynamic_offset;
2766 fill_buffer_surface_state(state.map, view->format, offset,
2767 view->range - dynamic_offset);
2768 } else {
2769 offset = view->offset;
2770 memcpy(state.map, view->surface_state.map, 64);
2771 }
2772
2773 /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
2774 *(uint64_t *)(state.map + 8 * 4) =
2775 anv_reloc_list_add(&cmd_buffer->surface_relocs,
2776 cmd_buffer->device,
2777 state.offset + 8 * 4,
2778 view->bo, offset);
2779
2780 bt_map[start + b] = state.offset;
2781 }
2782 }
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002783
2784 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002785}
2786
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002787static VkResult
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002788cmd_buffer_emit_samplers(struct anv_cmd_buffer *cmd_buffer,
2789 unsigned stage, struct anv_state *state)
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002790{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002791 struct anv_pipeline_layout *layout;
2792 uint32_t sampler_count;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002793
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002794 if (stage == VK_SHADER_STAGE_COMPUTE)
2795 layout = cmd_buffer->compute_pipeline->layout;
2796 else
2797 layout = cmd_buffer->pipeline->layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002798
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002799 sampler_count = layout ? layout->stage[stage].sampler_count : 0;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002800 if (sampler_count == 0)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002801 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002802
2803 uint32_t size = sampler_count * 16;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002804 *state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream, size, 32);
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002805
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002806 if (state->map == NULL)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002807 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2808
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002809 for (uint32_t set = 0; set < layout->num_sets; set++) {
2810 struct anv_descriptor_set_binding *d = &cmd_buffer->descriptors[set];
2811 struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;
2812 struct anv_descriptor_slot *sampler_slots =
2813 set_layout->stage[stage].sampler_start;
2814
2815 uint32_t start = layout->set[set].sampler_start[stage];
2816
2817 for (uint32_t b = 0; b < set_layout->stage[stage].sampler_count; b++) {
2818 struct anv_sampler *sampler =
2819 d->set->descriptors[sampler_slots[b].index].sampler;
2820
2821 if (!sampler)
2822 continue;
2823
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002824 memcpy(state->map + (start + b) * 16,
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002825 sampler->state, sizeof(sampler->state));
2826 }
2827 }
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002828
2829 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002830}
2831
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002832static VkResult
2833flush_descriptor_set(struct anv_cmd_buffer *cmd_buffer, uint32_t stage)
2834{
2835 struct anv_state surfaces = { 0, }, samplers = { 0, };
2836 VkResult result;
2837
2838 result = cmd_buffer_emit_samplers(cmd_buffer, stage, &samplers);
2839 if (result != VK_SUCCESS)
2840 return result;
2841 result = cmd_buffer_emit_binding_table(cmd_buffer, stage, &surfaces);
2842 if (result != VK_SUCCESS)
2843 return result;
2844
2845 static const uint32_t sampler_state_opcodes[] = {
2846 [VK_SHADER_STAGE_VERTEX] = 43,
2847 [VK_SHADER_STAGE_TESS_CONTROL] = 44, /* HS */
2848 [VK_SHADER_STAGE_TESS_EVALUATION] = 45, /* DS */
2849 [VK_SHADER_STAGE_GEOMETRY] = 46,
2850 [VK_SHADER_STAGE_FRAGMENT] = 47,
2851 [VK_SHADER_STAGE_COMPUTE] = 0,
2852 };
2853
2854 static const uint32_t binding_table_opcodes[] = {
2855 [VK_SHADER_STAGE_VERTEX] = 38,
2856 [VK_SHADER_STAGE_TESS_CONTROL] = 39,
2857 [VK_SHADER_STAGE_TESS_EVALUATION] = 40,
2858 [VK_SHADER_STAGE_GEOMETRY] = 41,
2859 [VK_SHADER_STAGE_FRAGMENT] = 42,
2860 [VK_SHADER_STAGE_COMPUTE] = 0,
2861 };
2862
2863 if (samplers.alloc_size > 0) {
2864 anv_batch_emit(&cmd_buffer->batch,
2865 GEN8_3DSTATE_SAMPLER_STATE_POINTERS_VS,
2866 ._3DCommandSubOpcode = sampler_state_opcodes[stage],
2867 .PointertoVSSamplerState = samplers.offset);
2868 }
2869
2870 if (surfaces.alloc_size > 0) {
2871 anv_batch_emit(&cmd_buffer->batch,
2872 GEN8_3DSTATE_BINDING_TABLE_POINTERS_VS,
2873 ._3DCommandSubOpcode = binding_table_opcodes[stage],
2874 .PointertoVSBindingTable = surfaces.offset);
2875 }
2876
2877 return VK_SUCCESS;
2878}
2879
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002880static void
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002881flush_descriptor_sets(struct anv_cmd_buffer *cmd_buffer)
2882{
Jason Ekstrand22513052015-05-30 10:07:29 -07002883 uint32_t s, dirty = cmd_buffer->descriptors_dirty &
2884 cmd_buffer->pipeline->active_stages;
2885
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002886 VkResult result;
Jason Ekstrand22513052015-05-30 10:07:29 -07002887 for_each_bit(s, dirty) {
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002888 result = flush_descriptor_set(cmd_buffer, s);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002889 if (result != VK_SUCCESS)
2890 break;
2891 }
2892
2893 if (result != VK_SUCCESS) {
2894 assert(result == VK_ERROR_OUT_OF_DEVICE_MEMORY);
2895
2896 result = anv_cmd_buffer_new_surface_state_bo(cmd_buffer);
2897 assert(result == VK_SUCCESS);
2898
Jason Ekstrand22513052015-05-30 10:07:29 -07002899 /* Re-emit all active binding tables */
2900 for_each_bit(s, cmd_buffer->pipeline->active_stages) {
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002901 result = flush_descriptor_set(cmd_buffer, s);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002902
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002903 /* It had better succeed this time */
2904 assert(result == VK_SUCCESS);
2905 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002906 }
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002907
Jason Ekstrand22513052015-05-30 10:07:29 -07002908 cmd_buffer->descriptors_dirty &= ~cmd_buffer->pipeline->active_stages;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002909}
2910
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002911static struct anv_state
2912anv_cmd_buffer_emit_dynamic(struct anv_cmd_buffer *cmd_buffer,
2913 uint32_t *a, uint32_t dwords, uint32_t alignment)
2914{
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002915 struct anv_state state;
2916
Jason Ekstrandce002332015-06-05 17:14:41 -07002917 state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
2918 dwords * 4, alignment);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002919 memcpy(state.map, a, dwords * 4);
2920
Jason Ekstrand9cae3d12015-06-09 21:36:12 -07002921 VG(VALGRIND_CHECK_MEM_IS_DEFINED(state.map, dwords * 4));
2922
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002923 return state;
2924}
2925
2926static struct anv_state
2927anv_cmd_buffer_merge_dynamic(struct anv_cmd_buffer *cmd_buffer,
Jason Ekstrandce002332015-06-05 17:14:41 -07002928 uint32_t *a, uint32_t *b,
2929 uint32_t dwords, uint32_t alignment)
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002930{
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002931 struct anv_state state;
2932 uint32_t *p;
2933
Jason Ekstrandce002332015-06-05 17:14:41 -07002934 state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
2935 dwords * 4, alignment);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002936 p = state.map;
2937 for (uint32_t i = 0; i < dwords; i++)
2938 p[i] = a[i] | b[i];
2939
Jason Ekstrand9cae3d12015-06-09 21:36:12 -07002940 VG(VALGRIND_CHECK_MEM_IS_DEFINED(p, dwords * 4));
2941
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002942 return state;
2943}
2944
/* Emit sampler state and the binding table for the compute stage, then pack
 * a fresh INTERFACE_DESCRIPTOR_DATA pointing at them and load it with
 * MEDIA_INTERFACE_DESCRIPTOR_LOAD.
 *
 * Returns VK_SUCCESS, or propagates the error from emitting samplers or the
 * binding table (e.g. surface state BO exhaustion) so the caller can recover
 * and retry.
 */
static VkResult
flush_compute_descriptor_set(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_device *device = cmd_buffer->device;
   struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
   struct anv_state surfaces = { 0, }, samplers = { 0, };
   VkResult result;

   result = cmd_buffer_emit_samplers(cmd_buffer,
                                     VK_SHADER_STAGE_COMPUTE, &samplers);
   if (result != VK_SUCCESS)
      return result;
   result = cmd_buffer_emit_binding_table(cmd_buffer,
                                          VK_SHADER_STAGE_COMPUTE, &surfaces);
   if (result != VK_SUCCESS)
      return result;

   /* Describe the compute kernel and point it at the state emitted above. */
   struct GEN8_INTERFACE_DESCRIPTOR_DATA desc = {
      .KernelStartPointer = pipeline->cs_simd,
      .KernelStartPointerHigh = 0,
      .BindingTablePointer = surfaces.offset,
      .BindingTableEntryCount = 0,
      .SamplerStatePointer = samplers.offset,
      .SamplerCount = 0,
      .NumberofThreadsinGPGPUThreadGroup = 0 /* FIXME: Really? */
   };

   uint32_t size = GEN8_INTERFACE_DESCRIPTOR_DATA_length * sizeof(uint32_t);
   struct anv_state state =
      anv_state_pool_alloc(&device->dynamic_state_pool, size, 64);

   /* Pack the descriptor into dynamic state memory... */
   GEN8_INTERFACE_DESCRIPTOR_DATA_pack(NULL, state.map, &desc);

   /* ...and tell the hardware where to find it. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_INTERFACE_DESCRIPTOR_LOAD,
                  .InterfaceDescriptorTotalLength = size,
                  .InterfaceDescriptorDataStartAddress = state.offset);

   return VK_SUCCESS;
}
2984
2985static void
2986anv_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer)
2987{
2988 struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
2989 VkResult result;
2990
2991 assert(pipeline->active_stages == VK_SHADER_STAGE_COMPUTE_BIT);
2992
2993 if (cmd_buffer->current_pipeline != GPGPU) {
2994 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPELINE_SELECT,
2995 .PipelineSelection = GPGPU);
2996 cmd_buffer->current_pipeline = GPGPU;
2997 }
2998
2999 if (cmd_buffer->compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
3000 anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);
3001
3002 if ((cmd_buffer->descriptors_dirty & VK_SHADER_STAGE_COMPUTE_BIT) ||
3003 (cmd_buffer->compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)) {
3004 result = flush_compute_descriptor_set(cmd_buffer);
3005 if (result != VK_SUCCESS) {
3006 result = anv_cmd_buffer_new_surface_state_bo(cmd_buffer);
3007 assert(result == VK_SUCCESS);
3008 result = flush_compute_descriptor_set(cmd_buffer);
3009 assert(result == VK_SUCCESS);
3010 }
3011 cmd_buffer->descriptors_dirty &= ~VK_SHADER_STAGE_COMPUTE;
3012 }
3013
3014 cmd_buffer->compute_dirty = 0;
3015}
3016
/* Flush all dirty 3D state to the batch before a draw: pipeline select,
 * vertex buffers, pipeline batch, descriptor sets, viewport/scissor
 * pointers, and the merged rasterizer / depth-stencil / color-calc /
 * vertex-fetch packets.  Emission order follows hardware requirements; do
 * not reorder without checking the PRM.
 */
static void
anv_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_pipeline *pipeline = cmd_buffer->pipeline;
   uint32_t *p;

   /* Only re-emit vertex buffers that are both dirty and actually used. */
   uint32_t vb_emit = cmd_buffer->vb_dirty & pipeline->vb_used;

   assert((pipeline->active_stages & VK_SHADER_STAGE_COMPUTE_BIT) == 0);

   if (cmd_buffer->current_pipeline != _3D) {
      anv_batch_emit(&cmd_buffer->batch, GEN8_PIPELINE_SELECT,
                     .PipelineSelection = _3D);
      cmd_buffer->current_pipeline = _3D;
   }

   if (vb_emit) {
      const uint32_t num_buffers = __builtin_popcount(vb_emit);
      /* One header dword plus 4 dwords of VERTEX_BUFFER_STATE per buffer. */
      const uint32_t num_dwords = 1 + num_buffers * 4;

      p = anv_batch_emitn(&cmd_buffer->batch, num_dwords,
                          GEN8_3DSTATE_VERTEX_BUFFERS);
      uint32_t vb, i = 0;
      for_each_bit(vb, vb_emit) {
         struct anv_buffer *buffer = cmd_buffer->vertex_bindings[vb].buffer;
         uint32_t offset = cmd_buffer->vertex_bindings[vb].offset;

         struct GEN8_VERTEX_BUFFER_STATE state = {
            .VertexBufferIndex = vb,
            .MemoryObjectControlState = GEN8_MOCS,
            .AddressModifyEnable = true,
            .BufferPitch = pipeline->binding_stride[vb],
            .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
            .BufferSize = buffer->size - offset
         };

         GEN8_VERTEX_BUFFER_STATE_pack(&cmd_buffer->batch, &p[1 + i * 4], &state);
         i++;
      }
   }

   if (cmd_buffer->dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY) {
      /* If somebody compiled a pipeline after starting a command buffer the
       * scratch bo may have grown since we started this cmd buffer (and
       * emitted STATE_BASE_ADDRESS). If we're binding that pipeline now,
       * reemit STATE_BASE_ADDRESS so that we use the bigger scratch bo. */
      if (cmd_buffer->scratch_size < pipeline->total_scratch)
         anv_cmd_buffer_emit_state_base_address(cmd_buffer);

      anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);
   }

   if (cmd_buffer->descriptors_dirty)
      flush_descriptor_sets(cmd_buffer);

   /* Viewport / scissor state pointers. */
   if (cmd_buffer->dirty & ANV_CMD_BUFFER_VP_DIRTY) {
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_SCISSOR_STATE_POINTERS,
                     .ScissorRectPointer = cmd_buffer->vp_state->scissor.offset);
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_CC,
                     .CCViewportPointer = cmd_buffer->vp_state->cc_vp.offset);
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_SF_CLIP,
                     .SFClipViewportPointer = cmd_buffer->vp_state->sf_clip_vp.offset);
   }

   /* SF / RASTER are built from both pipeline and dynamic RS state, so
    * either becoming dirty forces a merged re-emit. */
   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_RS_DIRTY)) {
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->rs_state->state_sf, pipeline->state_sf);
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->rs_state->state_raster, pipeline->state_raster);
   }

   if (cmd_buffer->ds_state &&
       (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)))
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->ds_state->state_wm_depth_stencil,
                           pipeline->state_wm_depth_stencil);

   /* COLOR_CALC_STATE combines DS and CB dynamic state; emit whichever
    * halves exist, merging when both are bound. */
   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_CB_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)) {
      struct anv_state state;
      if (cmd_buffer->ds_state == NULL)
         state = anv_cmd_buffer_emit_dynamic(cmd_buffer,
                                             cmd_buffer->cb_state->state_color_calc,
                                             GEN8_COLOR_CALC_STATE_length, 64);
      else if (cmd_buffer->cb_state == NULL)
         state = anv_cmd_buffer_emit_dynamic(cmd_buffer,
                                             cmd_buffer->ds_state->state_color_calc,
                                             GEN8_COLOR_CALC_STATE_length, 64);
      else
         state = anv_cmd_buffer_merge_dynamic(cmd_buffer,
                                              cmd_buffer->ds_state->state_color_calc,
                                              cmd_buffer->cb_state->state_color_calc,
                                              GEN8_COLOR_CALC_STATE_length, 64);

      anv_batch_emit(&cmd_buffer->batch,
                     GEN8_3DSTATE_CC_STATE_POINTERS,
                     .ColorCalcStatePointer = state.offset,
                     .ColorCalcStatePointerValid = true);
   }

   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY)) {
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->state_vf, pipeline->state_vf);
   }

   cmd_buffer->vb_dirty &= ~vb_emit;
   cmd_buffer->dirty = 0;
}
3124
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003125void anv_CmdDraw(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003126 VkCmdBuffer cmdBuffer,
3127 uint32_t firstVertex,
3128 uint32_t vertexCount,
3129 uint32_t firstInstance,
3130 uint32_t instanceCount)
3131{
3132 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3133
3134 anv_cmd_buffer_flush_state(cmd_buffer);
3135
3136 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3137 .VertexAccessType = SEQUENTIAL,
3138 .VertexCountPerInstance = vertexCount,
3139 .StartVertexLocation = firstVertex,
3140 .InstanceCount = instanceCount,
3141 .StartInstanceLocation = firstInstance,
3142 .BaseVertexLocation = 0);
3143}
3144
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003145void anv_CmdDrawIndexed(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003146 VkCmdBuffer cmdBuffer,
3147 uint32_t firstIndex,
3148 uint32_t indexCount,
3149 int32_t vertexOffset,
3150 uint32_t firstInstance,
3151 uint32_t instanceCount)
3152{
3153 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3154
3155 anv_cmd_buffer_flush_state(cmd_buffer);
3156
3157 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3158 .VertexAccessType = RANDOM,
3159 .VertexCountPerInstance = indexCount,
3160 .StartVertexLocation = firstIndex,
3161 .InstanceCount = instanceCount,
3162 .StartInstanceLocation = firstInstance,
Kristian Høgsberg Kristensenc8f07852015-06-02 22:35:47 -07003163 .BaseVertexLocation = vertexOffset);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003164}
3165
3166static void
3167anv_batch_lrm(struct anv_batch *batch,
3168 uint32_t reg, struct anv_bo *bo, uint32_t offset)
3169{
3170 anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_MEM,
3171 .RegisterAddress = reg,
3172 .MemoryAddress = { bo, offset });
3173}
3174
3175static void
3176anv_batch_lri(struct anv_batch *batch, uint32_t reg, uint32_t imm)
3177{
3178 anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_IMM,
3179 .RegisterOffset = reg,
3180 .DataDWord = imm);
3181}
3182
3183/* Auto-Draw / Indirect Registers */
3184#define GEN7_3DPRIM_END_OFFSET 0x2420
3185#define GEN7_3DPRIM_START_VERTEX 0x2430
3186#define GEN7_3DPRIM_VERTEX_COUNT 0x2434
3187#define GEN7_3DPRIM_INSTANCE_COUNT 0x2438
3188#define GEN7_3DPRIM_START_INSTANCE 0x243C
3189#define GEN7_3DPRIM_BASE_VERTEX 0x2440
3190
/* vkCmdDrawIndirect: load the draw parameters from the indirect buffer into
 * the 3DPRIM_* MMIO registers, then emit a 3DPRIMITIVE with
 * IndirectParameterEnable set.
 *
 * NOTE(review): 'count' and 'stride' are ignored -- only a single indirect
 * draw is emitted; multi-draw indirect appears unimplemented here. Confirm
 * against callers / spec requirements.
 */
void anv_CmdDrawIndirect(
    VkCmdBuffer                                 cmdBuffer,
    VkBuffer                                    _buffer,
    VkDeviceSize                                offset,
    uint32_t                                    count,
    uint32_t                                    stride)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
   struct anv_bo *bo = buffer->bo;
   uint32_t bo_offset = buffer->offset + offset;

   anv_cmd_buffer_flush_state(cmd_buffer);

   /* VkDrawIndirectCommand layout: vertexCount, instanceCount,
    * firstVertex, firstInstance -- loaded in that order below. */
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 12);
   /* Non-indexed draws have no base vertex; clear the register. */
   anv_batch_lri(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, 0);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
                  .IndirectParameterEnable = true,
                  .VertexAccessType = SEQUENTIAL);
}
3215
/* vkCmdDrawIndexedIndirect: load indexed-draw parameters from the indirect
 * buffer into the 3DPRIM_* MMIO registers, then emit an indexed
 * 3DPRIMITIVE with IndirectParameterEnable set.
 *
 * NOTE(review): 'count' and 'stride' are ignored -- only a single indirect
 * draw is emitted; multi-draw indirect appears unimplemented here.
 */
void anv_CmdDrawIndexedIndirect(
    VkCmdBuffer                                 cmdBuffer,
    VkBuffer                                    _buffer,
    VkDeviceSize                                offset,
    uint32_t                                    count,
    uint32_t                                    stride)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
   struct anv_bo *bo = buffer->bo;
   uint32_t bo_offset = buffer->offset + offset;

   anv_cmd_buffer_flush_state(cmd_buffer);

   /* VkDrawIndexedIndirectCommand layout: indexCount, instanceCount,
    * firstIndex, vertexOffset, firstInstance -- loaded in that order. */
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, bo, bo_offset + 12);
   anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 16);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
                  .IndirectParameterEnable = true,
                  .VertexAccessType = RANDOM);
}
3240
/* vkCmdDispatch: flush compute state and emit a GPGPU_WALKER launching an
 * (x, y, z) grid of thread groups, followed by MEDIA_STATE_FLUSH.
 */
void anv_CmdDispatch(
    VkCmdBuffer                                 cmdBuffer,
    uint32_t                                    x,
    uint32_t                                    y,
    uint32_t                                    z)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
   struct brw_cs_prog_data *prog_data = &pipeline->cs_prog_data;

   anv_cmd_buffer_flush_compute_state(cmd_buffer);

   anv_batch_emit(&cmd_buffer->batch, GEN8_GPGPU_WALKER,
                  /* Hardware field encodes the SIMD width divided by 16. */
                  .SIMDSize = prog_data->simd_size / 16,
                  .ThreadDepthCounterMaximum = 0,
                  .ThreadHeightCounterMaximum = 0,
                  .ThreadWidthCounterMaximum = pipeline->cs_thread_width_max,
                  .ThreadGroupIDXDimension = x,
                  .ThreadGroupIDYDimension = y,
                  .ThreadGroupIDZDimension = z,
                  .RightExecutionMask = pipeline->cs_right_mask,
                  .BottomExecutionMask = 0xffffffff);

   anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_STATE_FLUSH);
}
3266
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003267#define GPGPU_DISPATCHDIMX 0x2500
3268#define GPGPU_DISPATCHDIMY 0x2504
3269#define GPGPU_DISPATCHDIMZ 0x2508
3270
/* vkCmdDispatchIndirect: load the (x, y, z) group counts from the indirect
 * buffer into the GPGPU_DISPATCHDIM* MMIO registers, then emit a
 * GPGPU_WALKER with IndirectParameterEnable, followed by MEDIA_STATE_FLUSH.
 */
void anv_CmdDispatchIndirect(
    VkCmdBuffer                                 cmdBuffer,
    VkBuffer                                    _buffer,
    VkDeviceSize                                offset)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
   struct brw_cs_prog_data *prog_data = &pipeline->cs_prog_data;
   struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
   struct anv_bo *bo = buffer->bo;
   uint32_t bo_offset = buffer->offset + offset;

   anv_cmd_buffer_flush_compute_state(cmd_buffer);

   /* VkDispatchIndirectCommand: three consecutive uint32 group counts. */
   anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMX, bo, bo_offset);
   anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMY, bo, bo_offset + 4);
   anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMZ, bo, bo_offset + 8);

   anv_batch_emit(&cmd_buffer->batch, GEN8_GPGPU_WALKER,
                  .IndirectParameterEnable = true,
                  /* Hardware field encodes the SIMD width divided by 16. */
                  .SIMDSize = prog_data->simd_size / 16,
                  .ThreadDepthCounterMaximum = 0,
                  .ThreadHeightCounterMaximum = 0,
                  .ThreadWidthCounterMaximum = pipeline->cs_thread_width_max,
                  .RightExecutionMask = pipeline->cs_right_mask,
                  .BottomExecutionMask = 0xffffffff);

   anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_STATE_FLUSH);
}
3300
/* vkCmdSetEvent: not yet implemented -- stub() flags the missing entrypoint. */
void anv_CmdSetEvent(
    VkCmdBuffer                                 cmdBuffer,
    VkEvent                                     event,
    VkPipeEvent                                 pipeEvent)
{
   stub();
}
3308
/* vkCmdResetEvent: not yet implemented -- stub() flags the missing entrypoint. */
void anv_CmdResetEvent(
    VkCmdBuffer                                 cmdBuffer,
    VkEvent                                     event,
    VkPipeEvent                                 pipeEvent)
{
   stub();
}
3316
/* vkCmdWaitEvents: not yet implemented -- stub() flags the missing entrypoint. */
void anv_CmdWaitEvents(
    VkCmdBuffer                                 cmdBuffer,
    VkWaitEvent                                 waitEvent,
    uint32_t                                    eventCount,
    const VkEvent*                              pEvents,
    VkPipeEventFlags                            pipeEventMask,
    uint32_t                                    memBarrierCount,
    const void* const*                          ppMemBarriers)
{
   stub();
}
3328
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003329void anv_CmdPipelineBarrier(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003330 VkCmdBuffer cmdBuffer,
3331 VkWaitEvent waitEvent,
Chad Versace18ee32e2015-07-07 15:42:38 -07003332 VkPipeEventFlags pipeEventMask,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003333 uint32_t memBarrierCount,
Chad Versace18ee32e2015-07-07 15:42:38 -07003334 const void* const* ppMemBarriers)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003335{
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003336 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *)cmdBuffer;
3337 uint32_t b, *dw;
3338
3339 struct GEN8_PIPE_CONTROL cmd = {
3340 GEN8_PIPE_CONTROL_header,
3341 .PostSyncOperation = NoWrite,
3342 };
3343
3344 /* XXX: I think waitEvent is a no-op on our HW. We should verify that. */
3345
Chad Versace18ee32e2015-07-07 15:42:38 -07003346 if (anv_clear_mask(&pipeEventMask, VK_PIPE_EVENT_TOP_OF_PIPE_BIT)) {
3347 /* This is just what PIPE_CONTROL does */
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003348 }
3349
Chad Versace18ee32e2015-07-07 15:42:38 -07003350 if (anv_clear_mask(&pipeEventMask,
3351 VK_PIPE_EVENT_VERTEX_PROCESSING_COMPLETE_BIT |
3352 VK_PIPE_EVENT_LOCAL_FRAGMENT_PROCESSING_COMPLETE_BIT |
3353 VK_PIPE_EVENT_FRAGMENT_PROCESSING_COMPLETE_BIT)) {
3354 cmd.StallAtPixelScoreboard = true;
3355 }
3356
3357
3358 if (anv_clear_mask(&pipeEventMask,
3359 VK_PIPE_EVENT_GRAPHICS_PIPELINE_COMPLETE_BIT |
3360 VK_PIPE_EVENT_COMPUTE_PIPELINE_COMPLETE_BIT |
3361 VK_PIPE_EVENT_TRANSFER_COMPLETE_BIT |
3362 VK_PIPE_EVENT_COMMANDS_COMPLETE_BIT)) {
3363 cmd.CommandStreamerStallEnable = true;
3364 }
3365
3366 if (anv_clear_mask(&pipeEventMask, VK_PIPE_EVENT_CPU_SIGNAL_BIT)) {
3367 anv_finishme("VK_PIPE_EVENT_CPU_SIGNAL_BIT");
3368 }
3369
3370 /* We checked all known VkPipeEventFlags. */
3371 anv_assert(pipeEventMask == 0);
3372
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003373 /* XXX: Right now, we're really dumb and just flush whatever categories
3374 * the app asks for. One of these days we may make this a bit better
3375 * but right now that's all the hardware allows for in most areas.
3376 */
3377 VkMemoryOutputFlags out_flags = 0;
3378 VkMemoryInputFlags in_flags = 0;
3379
3380 for (uint32_t i = 0; i < memBarrierCount; i++) {
3381 const struct anv_common *common = ppMemBarriers[i];
3382 switch (common->sType) {
3383 case VK_STRUCTURE_TYPE_MEMORY_BARRIER: {
3384 const VkMemoryBarrier *barrier = (VkMemoryBarrier *)common;
3385 out_flags |= barrier->outputMask;
3386 in_flags |= barrier->inputMask;
3387 break;
3388 }
3389 case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER: {
3390 const VkBufferMemoryBarrier *barrier = (VkBufferMemoryBarrier *)common;
3391 out_flags |= barrier->outputMask;
3392 in_flags |= barrier->inputMask;
3393 break;
3394 }
3395 case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER: {
3396 const VkImageMemoryBarrier *barrier = (VkImageMemoryBarrier *)common;
3397 out_flags |= barrier->outputMask;
3398 in_flags |= barrier->inputMask;
3399 break;
3400 }
3401 default:
3402 unreachable("Invalid memory barrier type");
3403 }
3404 }
3405
3406 for_each_bit(b, out_flags) {
3407 switch ((VkMemoryOutputFlags)(1 << b)) {
Jason Ekstrand2b404e52015-07-06 17:18:25 -07003408 case VK_MEMORY_OUTPUT_HOST_WRITE_BIT:
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003409 break; /* FIXME: Little-core systems */
3410 case VK_MEMORY_OUTPUT_SHADER_WRITE_BIT:
3411 cmd.DCFlushEnable = true;
3412 break;
3413 case VK_MEMORY_OUTPUT_COLOR_ATTACHMENT_BIT:
3414 cmd.RenderTargetCacheFlushEnable = true;
3415 break;
3416 case VK_MEMORY_OUTPUT_DEPTH_STENCIL_ATTACHMENT_BIT:
3417 cmd.DepthCacheFlushEnable = true;
3418 break;
3419 case VK_MEMORY_OUTPUT_TRANSFER_BIT:
3420 cmd.RenderTargetCacheFlushEnable = true;
3421 cmd.DepthCacheFlushEnable = true;
3422 break;
3423 default:
3424 unreachable("Invalid memory output flag");
3425 }
3426 }
3427
3428 for_each_bit(b, out_flags) {
3429 switch ((VkMemoryInputFlags)(1 << b)) {
Jason Ekstrand2b404e52015-07-06 17:18:25 -07003430 case VK_MEMORY_INPUT_HOST_READ_BIT:
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003431 break; /* FIXME: Little-core systems */
3432 case VK_MEMORY_INPUT_INDIRECT_COMMAND_BIT:
3433 case VK_MEMORY_INPUT_INDEX_FETCH_BIT:
3434 case VK_MEMORY_INPUT_VERTEX_ATTRIBUTE_FETCH_BIT:
3435 cmd.VFCacheInvalidationEnable = true;
3436 break;
3437 case VK_MEMORY_INPUT_UNIFORM_READ_BIT:
3438 cmd.ConstantCacheInvalidationEnable = true;
3439 /* fallthrough */
3440 case VK_MEMORY_INPUT_SHADER_READ_BIT:
3441 cmd.DCFlushEnable = true;
3442 cmd.TextureCacheInvalidationEnable = true;
3443 break;
3444 case VK_MEMORY_INPUT_COLOR_ATTACHMENT_BIT:
3445 case VK_MEMORY_INPUT_DEPTH_STENCIL_ATTACHMENT_BIT:
3446 break; /* XXX: Hunh? */
3447 case VK_MEMORY_INPUT_TRANSFER_BIT:
3448 cmd.TextureCacheInvalidationEnable = true;
3449 break;
3450 }
3451 }
3452
3453 dw = anv_batch_emit_dwords(&cmd_buffer->batch, GEN8_PIPE_CONTROL_length);
3454 GEN8_PIPE_CONTROL_pack(&cmd_buffer->batch, dw, &cmd);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003455}
3456
/* Destructor for anv_framebuffer objects (installed as base.destructor):
 * tears down the viewport state created in anv_CreateFramebuffer and frees
 * the framebuffer itself.
 */
static void
anv_framebuffer_destroy(struct anv_device *device,
                        struct anv_object *object,
                        VkObjectType obj_type)
{
   struct anv_framebuffer *fb = (struct anv_framebuffer *)object;

   assert(obj_type == VK_OBJECT_TYPE_FRAMEBUFFER);

   /* Release the implicit dynamic viewport state owned by the framebuffer. */
   anv_DestroyObject((VkDevice) device,
                     VK_OBJECT_TYPE_DYNAMIC_VP_STATE,
                     fb->vp_state);

   anv_device_free(device, fb);
}
3472
/* vkCreateFramebuffer: allocate an anv_framebuffer, record its color and
 * depth/stencil attachments, and bake a default dynamic viewport/scissor
 * state covering the full framebuffer (destroyed in anv_framebuffer_destroy).
 */
VkResult anv_CreateFramebuffer(
    VkDevice                                    _device,
    const VkFramebufferCreateInfo*              pCreateInfo,
    VkFramebuffer*                              pFramebuffer)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_framebuffer *framebuffer;

   /* Zero-stride fallback so depth/stencil emission can always dereference
    * framebuffer->depth_stencil even when no attachment was provided. */
   static const struct anv_depth_stencil_view null_view =
      { .depth_format = D16_UNORM, .depth_stride = 0, .stencil_stride = 0 };

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);

   framebuffer = anv_device_alloc(device, sizeof(*framebuffer), 8,
                                  VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (framebuffer == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   framebuffer->base.destructor = anv_framebuffer_destroy;

   /* NOTE(review): colorAttachmentCount is not validated against the
    * capacity of framebuffer->color_attachments -- confirm the array bound
    * elsewhere guarantees this cannot overflow. */
   framebuffer->color_attachment_count = pCreateInfo->colorAttachmentCount;
   for (uint32_t i = 0; i < pCreateInfo->colorAttachmentCount; i++) {
      framebuffer->color_attachments[i] =
         (struct anv_surface_view *) pCreateInfo->pColorAttachments[i].view;
   }

   if (pCreateInfo->pDepthStencilAttachment) {
      framebuffer->depth_stencil =
         (struct anv_depth_stencil_view *) pCreateInfo->pDepthStencilAttachment->view;
   } else {
      framebuffer->depth_stencil = &null_view;
   }

   framebuffer->sample_count = pCreateInfo->sampleCount;
   framebuffer->width = pCreateInfo->width;
   framebuffer->height = pCreateInfo->height;
   framebuffer->layers = pCreateInfo->layers;

   /* Single viewport + scissor spanning the whole framebuffer. */
   anv_CreateDynamicViewportState((VkDevice) device,
      &(VkDynamicVpStateCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO,
         .viewportAndScissorCount = 1,
         .pViewports = (VkViewport[]) {
            {
               .originX = 0,
               .originY = 0,
               .width = pCreateInfo->width,
               .height = pCreateInfo->height,
               .minDepth = 0,
               .maxDepth = 1
            },
         },
         .pScissors = (VkRect2D[]) {
            { { 0, 0 },
              { pCreateInfo->width, pCreateInfo->height } },
         }
      },
      &framebuffer->vp_state);

   *pFramebuffer = (VkFramebuffer) framebuffer;

   return VK_SUCCESS;
}
3536
/* vkCreateRenderPass: allocate a render pass (with a trailing flexible
 * per-layer array), copy the render area and per-layer color load ops /
 * clear values, and count how many layers request
 * VK_ATTACHMENT_LOAD_OP_CLEAR.
 */
VkResult anv_CreateRenderPass(
    VkDevice                                    _device,
    const VkRenderPassCreateInfo*               pCreateInfo,
    VkRenderPass*                               pRenderPass)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_render_pass *pass;
   size_t size;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);

   /* Base struct plus one anv_render_pass_layer per layer. */
   size = sizeof(*pass) +
      pCreateInfo->layers * sizeof(struct anv_render_pass_layer);
   pass = anv_device_alloc(device, size, 8,
                           VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (pass == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pass->render_area = pCreateInfo->renderArea;

   pass->num_layers = pCreateInfo->layers;

   pass->num_clear_layers = 0;
   for (uint32_t i = 0; i < pCreateInfo->layers; i++) {
      pass->layers[i].color_load_op = pCreateInfo->pColorLoadOps[i];
      pass->layers[i].clear_color = pCreateInfo->pColorLoadClearValues[i];
      if (pass->layers[i].color_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR)
         pass->num_clear_layers++;
   }

   *pRenderPass = (VkRenderPass) pass;

   return VK_SUCCESS;
}
3571
Jason Ekstrand0ff06542015-07-07 17:11:35 -07003572VkResult anv_GetRenderAreaGranularity(
3573 VkDevice device,
3574 VkRenderPass renderPass,
3575 VkExtent2D* pGranularity)
3576{
3577 *pGranularity = (VkExtent2D) { 1, 1 };
3578
3579 return VK_SUCCESS;
3580}
3581
/* Emit the depth/stencil buffer packets for the current framebuffer's
 * depth-stencil view.  A zero stride (the null_view fallback) disables the
 * corresponding write enable, and hierarchical depth is always disabled.
 */
static void
anv_cmd_buffer_emit_depth_stencil(struct anv_cmd_buffer *cmd_buffer,
                                  struct anv_render_pass *pass)
{
   const struct anv_depth_stencil_view *view =
      cmd_buffer->framebuffer->depth_stencil;

   /* FIXME: Implement the PMA stall W/A */
   /* FIXME: Width and Height are wrong */

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DEPTH_BUFFER,
                  .SurfaceType = SURFTYPE_2D,
                  .DepthWriteEnable = view->depth_stride > 0,
                  .StencilWriteEnable = view->stencil_stride > 0,
                  .HierarchicalDepthBufferEnable = false,
                  .SurfaceFormat = view->depth_format,
                  /* Hardware pitch fields are encoded as (value - 1). */
                  .SurfacePitch = view->depth_stride > 0 ? view->depth_stride - 1 : 0,
                  .SurfaceBaseAddress = { view->bo, view->depth_offset },
                  .Height = pass->render_area.extent.height - 1,
                  .Width = pass->render_area.extent.width - 1,
                  .LOD = 0,
                  .Depth = 1 - 1,
                  .MinimumArrayElement = 0,
                  .DepthBufferObjectControlState = GEN8_MOCS,
                  .RenderTargetViewExtent = 1 - 1,
                  .SurfaceQPitch = view->depth_qpitch >> 2);

   /* Disable hierarchial depth buffers. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_HIER_DEPTH_BUFFER);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_STENCIL_BUFFER,
                  .StencilBufferEnable = view->stencil_stride > 0,
                  .StencilBufferObjectControlState = GEN8_MOCS,
                  .SurfacePitch = view->stencil_stride > 0 ? view->stencil_stride - 1 : 0,
                  .SurfaceBaseAddress = { view->bo, view->stencil_offset },
                  .SurfaceQPitch = view->stencil_qpitch >> 2);

   /* Clear the clear params. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_CLEAR_PARAMS);
}
3622
/* vkCmdPushConstants: update push-constant values in a command buffer.
 *
 * FINISHME: not implemented yet.  stub() presumably reports the missing
 * entry point (see private.h) — no state is recorded and `values` is
 * ignored.
 */
void anv_CmdPushConstants(
    VkCmdBuffer cmdBuffer,
    VkPipelineLayout layout,
    VkShaderStageFlags stageFlags,
    uint32_t start,
    uint32_t length,
    const void* values)
{
   stub();
}
3633
/* vkCmdBeginRenderPass: begin recording a render pass instance.
 *
 * Binds the framebuffer to the command buffer, clips rendering to the
 * pass's render area via 3DSTATE_DRAWING_RECTANGLE, programs depth/stencil
 * buffer state, and performs the pass's initial clears.
 */
void anv_CmdBeginRenderPass(
    VkCmdBuffer cmdBuffer,
    const VkRenderPassBegin* pRenderPassBegin)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_render_pass *pass = (struct anv_render_pass *) pRenderPassBegin->renderPass;
   struct anv_framebuffer *framebuffer =
      (struct anv_framebuffer *) pRenderPassBegin->framebuffer;

   /* Only inline contents are supported; secondary command buffers inside
    * a render pass are not handled yet.
    */
   assert(pRenderPassBegin->contents == VK_RENDER_PASS_CONTENTS_INLINE);

   cmd_buffer->framebuffer = framebuffer;

   /* The new framebuffer's attachments must be (re)emitted, so mark the
    * fragment-stage descriptors dirty.
    */
   cmd_buffer->descriptors_dirty |= VK_SHADER_STAGE_FRAGMENT_BIT;

   /* The hardware takes inclusive maxima, hence the "- 1" on both axes.
    * NOTE(review): render_area lives on the render pass object here, not
    * on VkRenderPassBegin — an artifact of this API revision.
    */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DRAWING_RECTANGLE,
                  .ClippedDrawingRectangleYMin = pass->render_area.offset.y,
                  .ClippedDrawingRectangleXMin = pass->render_area.offset.x,
                  .ClippedDrawingRectangleYMax =
                     pass->render_area.offset.y + pass->render_area.extent.height - 1,
                  .ClippedDrawingRectangleXMax =
                     pass->render_area.offset.x + pass->render_area.extent.width - 1,
                  .DrawingRectangleOriginY = 0,
                  .DrawingRectangleOriginX = 0);

   anv_cmd_buffer_emit_depth_stencil(cmd_buffer, pass);

   /* Perform the clears requested for this pass.  NOTE(review): presumably
    * implements the attachment load-op clears — confirm in
    * anv_cmd_buffer_clear.
    */
   anv_cmd_buffer_clear(cmd_buffer, pass);
}
3663
/* vkCmdEndRenderPass: end the current render pass instance.
 *
 * No per-pass teardown is needed beyond flushing caches so that rendering
 * results become visible (see comment below).
 */
void anv_CmdEndRenderPass(
    VkCmdBuffer cmdBuffer)
{
   /* Emit a flushing pipe control at the end of a pass. This is kind of a
    * hack but it ensures that render targets always actually get written.
    * Eventually, we should do flushing based on image format transitions
    * or something of that nature.
    */
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *)cmdBuffer;
   anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
                  .PostSyncOperation = NoWrite,
                  .RenderTargetCacheFlushEnable = true,
                  .InstructionCacheInvalidateEnable = true,
                  .DepthCacheFlushEnable = true,
                  .VFCacheInvalidationEnable = true,
                  .TextureCacheInvalidationEnable = true,
                  .CommandStreamerStallEnable = true);
}
Kristian Høgsbergf8866472015-05-15 22:04:15 -07003682
/* vkCmdExecuteCommands: execute secondary command buffers from a primary.
 *
 * FINISHME: not implemented yet.  stub() presumably reports the missing
 * entry point; pCmdBuffers is ignored.
 */
void anv_CmdExecuteCommands(
    VkCmdBuffer cmdBuffer,
    uint32_t cmdBuffersCount,
    const VkCmdBuffer* pCmdBuffers)
{
   stub();
}
3690
/* Debug marker / debug tag entry points.
 *
 * These are declared here with explicit default visibility so they are
 * exported from the shared object — NOTE(review): presumably because they
 * are not routed through the driver's generated dispatch table like the
 * anv_* entry points above; confirm against the exporter.  All three are
 * accepted-but-ignored no-ops.
 */
void vkCmdDbgMarkerBegin(
    VkCmdBuffer cmdBuffer,
    const char* pMarker)
   __attribute__ ((visibility ("default")));

void vkCmdDbgMarkerEnd(
    VkCmdBuffer cmdBuffer)
   __attribute__ ((visibility ("default")));

VkResult vkDbgSetObjectTag(
    VkDevice device,
    VkObject object,
    size_t tagSize,
    const void* pTag)
   __attribute__ ((visibility ("default")));


/* No-op: the marker string is discarded. */
void vkCmdDbgMarkerBegin(
    VkCmdBuffer cmdBuffer,
    const char* pMarker)
{
}

/* No-op. */
void vkCmdDbgMarkerEnd(
    VkCmdBuffer cmdBuffer)
{
}

/* No-op: the tag is discarded but success is reported so callers proceed. */
VkResult vkDbgSetObjectTag(
    VkDevice device,
    VkObject object,
    size_t tagSize,
    const void* pTag)
{
   return VK_SUCCESS;
}