blob: 423f165e400c46cf5cae820867cedb790b5cd195 [file] [log] [blame]
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001/*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include "private.h"
31
/* Read an integer from environment variable NAME.
 *
 * Accepts any base strtol understands (decimal, 0x-prefixed hex, octal).
 * An unset variable reads as 0; so does a value strtol cannot parse.
 */
static int
anv_env_get_int(const char *name)
{
   const char *str = getenv(name);

   return str ? strtol(str, NULL, 0) : 0;
}
42
43static VkResult
44fill_physical_device(struct anv_physical_device *device,
45 struct anv_instance *instance,
46 const char *path)
47{
48 int fd;
49
50 fd = open("/dev/dri/renderD128", O_RDWR | O_CLOEXEC);
51 if (fd < 0)
52 return vk_error(VK_ERROR_UNAVAILABLE);
53
54 device->instance = instance;
55 device->path = path;
56
57 device->chipset_id = anv_env_get_int("INTEL_DEVID_OVERRIDE");
58 device->no_hw = false;
59 if (device->chipset_id) {
60 /* INTEL_DEVID_OVERRIDE implies INTEL_NO_HW. */
61 device->no_hw = true;
62 } else {
63 device->chipset_id = anv_gem_get_param(fd, I915_PARAM_CHIPSET_ID);
64 }
65 if (!device->chipset_id)
66 goto fail;
67
68 device->name = brw_get_device_name(device->chipset_id);
69 device->info = brw_get_device_info(device->chipset_id, -1);
70 if (!device->info)
71 goto fail;
72
73 if (!anv_gem_get_param(fd, I915_PARAM_HAS_WAIT_TIMEOUT))
74 goto fail;
75
76 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXECBUF2))
77 goto fail;
78
79 if (!anv_gem_get_param(fd, I915_PARAM_HAS_LLC))
80 goto fail;
81
82 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_CONSTANTS))
83 goto fail;
84
85 close(fd);
86
87 return VK_SUCCESS;
88
89 fail:
90 close(fd);
91
92 return vk_error(VK_ERROR_UNAVAILABLE);
93}
94
95static void *default_alloc(
96 void* pUserData,
97 size_t size,
98 size_t alignment,
99 VkSystemAllocType allocType)
100{
101 return malloc(size);
102}
103
/* Default host free callback; simply forwards to libc free().
 * pUserData is unused for the default allocator.
 */
static void default_free(
    void*                                       pUserData,
    void*                                       pMem)
{
   free(pMem);
}
110
/* Fallback allocator installed when VkInstanceCreateInfo::pAllocCb is
 * NULL; wraps plain malloc/free with no user data.
 */
static const VkAllocCallbacks default_alloc_callbacks = {
   .pUserData = NULL,
   .pfnAlloc = default_alloc,
   .pfnFree = default_free
};
116
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700117VkResult anv_CreateInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700118 const VkInstanceCreateInfo* pCreateInfo,
119 VkInstance* pInstance)
120{
121 struct anv_instance *instance;
122 const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
123 void *user_data = NULL;
124 VkResult result;
125
126 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
127
128 if (pCreateInfo->pAllocCb) {
129 alloc_callbacks = pCreateInfo->pAllocCb;
130 user_data = pCreateInfo->pAllocCb->pUserData;
131 }
132 instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
133 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
134 if (!instance)
135 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
136
137 instance->pAllocUserData = alloc_callbacks->pUserData;
138 instance->pfnAlloc = alloc_callbacks->pfnAlloc;
139 instance->pfnFree = alloc_callbacks->pfnFree;
140 instance->apiVersion = pCreateInfo->pAppInfo->apiVersion;
141
142 instance->physicalDeviceCount = 0;
143 result = fill_physical_device(&instance->physicalDevice,
144 instance, "/dev/dri/renderD128");
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700145
Chad Versacea61f3072015-05-20 19:51:10 -0700146 if (result != VK_SUCCESS)
147 return result;
148
149 instance->physicalDeviceCount++;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700150 *pInstance = (VkInstance) instance;
151
152 return VK_SUCCESS;
153}
154
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700155VkResult anv_DestroyInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700156 VkInstance _instance)
157{
158 struct anv_instance *instance = (struct anv_instance *) _instance;
159
160 instance->pfnFree(instance->pAllocUserData, instance);
161
162 return VK_SUCCESS;
163}
164
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700165VkResult anv_EnumeratePhysicalDevices(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700166 VkInstance _instance,
167 uint32_t* pPhysicalDeviceCount,
168 VkPhysicalDevice* pPhysicalDevices)
169{
170 struct anv_instance *instance = (struct anv_instance *) _instance;
171
172 if (*pPhysicalDeviceCount >= 1)
173 pPhysicalDevices[0] = (VkPhysicalDevice) &instance->physicalDevice;
174 *pPhysicalDeviceCount = instance->physicalDeviceCount;
175
176 return VK_SUCCESS;
177}
178
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700179VkResult anv_GetPhysicalDeviceInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700180 VkPhysicalDevice physicalDevice,
181 VkPhysicalDeviceInfoType infoType,
182 size_t* pDataSize,
183 void* pData)
184{
185 struct anv_physical_device *device = (struct anv_physical_device *) physicalDevice;
186 VkPhysicalDeviceProperties *properties;
187 VkPhysicalDevicePerformance *performance;
188 VkPhysicalDeviceQueueProperties *queue_properties;
189 VkPhysicalDeviceMemoryProperties *memory_properties;
Kristian Høgsberga29df712015-05-15 22:04:52 -0700190 VkDisplayPropertiesWSI *display_properties;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700191 uint64_t ns_per_tick = 80;
192
Kristian Høgsberga29df712015-05-15 22:04:52 -0700193 switch ((uint32_t) infoType) {
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700194 case VK_PHYSICAL_DEVICE_INFO_TYPE_PROPERTIES:
195 properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700196
197 *pDataSize = sizeof(*properties);
198 if (pData == NULL)
199 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700200
201 properties->apiVersion = 1;
202 properties->driverVersion = 1;
203 properties->vendorId = 0x8086;
204 properties->deviceId = device->chipset_id;
205 properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
206 strcpy(properties->deviceName, device->name);
207 properties->maxInlineMemoryUpdateSize = 0;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700208 properties->maxBoundDescriptorSets = MAX_SETS;
209 properties->maxThreadGroupSize = 512;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700210 properties->timestampFrequency = 1000 * 1000 * 1000 / ns_per_tick;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700211 properties->multiColorAttachmentClears = true;
212 properties->maxDescriptorSets = 8;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700213 properties->maxViewports = 16;
214 properties->maxColorAttachments = 8;
215 return VK_SUCCESS;
216
217 case VK_PHYSICAL_DEVICE_INFO_TYPE_PERFORMANCE:
218 performance = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700219
220 *pDataSize = sizeof(*performance);
221 if (pData == NULL)
222 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700223
224 performance->maxDeviceClock = 1.0;
225 performance->aluPerClock = 1.0;
226 performance->texPerClock = 1.0;
227 performance->primsPerClock = 1.0;
228 performance->pixelsPerClock = 1.0;
229 return VK_SUCCESS;
230
231 case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PROPERTIES:
232 queue_properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700233
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700234 *pDataSize = sizeof(*queue_properties);
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700235 if (pData == NULL)
236 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700237
238 queue_properties->queueFlags = 0;
239 queue_properties->queueCount = 1;
240 queue_properties->maxAtomicCounters = 0;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700241 queue_properties->supportsTimestamps = true;
242 queue_properties->maxMemReferences = 256;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700243 return VK_SUCCESS;
244
245 case VK_PHYSICAL_DEVICE_INFO_TYPE_MEMORY_PROPERTIES:
246 memory_properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700247
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700248 *pDataSize = sizeof(*memory_properties);
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700249 if (pData == NULL)
250 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700251
252 memory_properties->supportsMigration = false;
253 memory_properties->supportsPinning = false;
254 return VK_SUCCESS;
255
Kristian Høgsberga29df712015-05-15 22:04:52 -0700256 case VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI:
257 anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI");
258
259 *pDataSize = sizeof(*display_properties);
260 if (pData == NULL)
261 return VK_SUCCESS;
262
263 display_properties = pData;
264 display_properties->display = 0;
265 display_properties->physicalResolution = (VkExtent2D) { 0, 0 };
266 return VK_SUCCESS;
267
268 case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI:
269 anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI");
270 return VK_SUCCESS;
271
272
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700273 default:
274 return VK_UNSUPPORTED;
275 }
276
277}
278
279void * vkGetProcAddr(
280 VkPhysicalDevice physicalDevice,
281 const char* pName)
282{
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700283 return anv_lookup_entrypoint(pName);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700284}
285
286static void
287parse_debug_flags(struct anv_device *device)
288{
289 const char *debug, *p, *end;
290
291 debug = getenv("INTEL_DEBUG");
292 device->dump_aub = false;
293 if (debug) {
294 for (p = debug; *p; p = end + 1) {
295 end = strchrnul(p, ',');
296 if (end - p == 3 && memcmp(p, "aub", 3) == 0)
297 device->dump_aub = true;
298 if (end - p == 5 && memcmp(p, "no_hw", 5) == 0)
299 device->no_hw = true;
300 if (*end == '\0')
301 break;
302 }
303 }
304}
305
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700306static VkResult
307anv_queue_init(struct anv_device *device, struct anv_queue *queue)
308{
309 queue->device = device;
310 queue->pool = &device->surface_state_pool;
311
312 queue->completed_serial = anv_state_pool_alloc(queue->pool, 4, 4);
313 if (queue->completed_serial.map == NULL)
314 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
315
316 *(uint32_t *)queue->completed_serial.map = 0;
317 queue->next_serial = 1;
318
319 return VK_SUCCESS;
320}
321
/* Tear down a queue initialized by anv_queue_init.
 *
 * Frees only the completed-serial state, and only under valgrind.
 */
static void
anv_queue_finish(struct anv_queue *queue)
{
#ifdef HAVE_VALGRIND
   /* This gets torn down with the device so we only need to do this if
    * valgrind is present.
    */
   anv_state_pool_free(queue->pool, queue->completed_serial);
#endif
}
332
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -0700333static void
334anv_device_init_border_colors(struct anv_device *device)
335{
336 float float_border_colors[][4] = {
337 [VK_BORDER_COLOR_OPAQUE_WHITE] = { 1.0, 1.0, 1.0, 1.0 },
338 [VK_BORDER_COLOR_TRANSPARENT_BLACK] = { 0.0, 0.0, 0.0, 0.0 },
339 [VK_BORDER_COLOR_OPAQUE_BLACK] = { 0.0, 0.0, 0.0, 1.0 }
340 };
341
342 uint32_t uint32_border_colors[][4] = {
343 [VK_BORDER_COLOR_OPAQUE_WHITE] = { 1, 1, 1, 1 },
344 [VK_BORDER_COLOR_TRANSPARENT_BLACK] = { 0, 0, 0, 0 },
345 [VK_BORDER_COLOR_OPAQUE_BLACK] = { 0, 0, 0, 1 }
346 };
347
348 device->float_border_colors =
349 anv_state_pool_alloc(&device->dynamic_state_pool,
350 sizeof(float_border_colors), 32);
351 memcpy(device->float_border_colors.map,
352 float_border_colors, sizeof(float_border_colors));
353
354 device->uint32_border_colors =
355 anv_state_pool_alloc(&device->dynamic_state_pool,
356 sizeof(uint32_border_colors), 32);
357 memcpy(device->uint32_border_colors.map,
358 uint32_border_colors, sizeof(uint32_border_colors));
359
360}
361
Jason Ekstrand730ca0e2015-05-28 10:20:18 -0700362static const uint32_t BATCH_SIZE = 8192;
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700363
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700364VkResult anv_CreateDevice(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700365 VkPhysicalDevice _physicalDevice,
366 const VkDeviceCreateInfo* pCreateInfo,
367 VkDevice* pDevice)
368{
369 struct anv_physical_device *physicalDevice =
370 (struct anv_physical_device *) _physicalDevice;
371 struct anv_instance *instance = physicalDevice->instance;
372 struct anv_device *device;
373
374 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
375
376 device = instance->pfnAlloc(instance->pAllocUserData,
377 sizeof(*device), 8,
378 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
379 if (!device)
380 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
381
382 device->no_hw = physicalDevice->no_hw;
383 parse_debug_flags(device);
384
385 device->instance = physicalDevice->instance;
386 device->fd = open("/dev/dri/renderD128", O_RDWR | O_CLOEXEC);
387 if (device->fd == -1)
388 goto fail_device;
389
390 device->context_id = anv_gem_create_context(device);
391 if (device->context_id == -1)
392 goto fail_fd;
393
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700394 anv_bo_pool_init(&device->batch_bo_pool, device, BATCH_SIZE);
395
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700396 anv_block_pool_init(&device->dynamic_state_block_pool, device, 2048);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700397
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700398 anv_state_pool_init(&device->dynamic_state_pool,
399 &device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700400
401 anv_block_pool_init(&device->instruction_block_pool, device, 2048);
402 anv_block_pool_init(&device->surface_state_block_pool, device, 2048);
403
404 anv_state_pool_init(&device->surface_state_pool,
405 &device->surface_state_block_pool);
406
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700407 device->info = *physicalDevice->info;
408
Kristian Høgsberg Kristensen9eab70e2015-06-03 23:03:29 -0700409 device->compiler = anv_compiler_create(device);
410 device->aub_writer = NULL;
411
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700412 pthread_mutex_init(&device->mutex, NULL);
413
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700414 anv_queue_init(device, &device->queue);
415
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -0700416 anv_device_init_meta(device);
417
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -0700418 anv_device_init_border_colors(device);
419
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700420 *pDevice = (VkDevice) device;
421
422 return VK_SUCCESS;
423
424 fail_fd:
425 close(device->fd);
426 fail_device:
427 anv_device_free(device, device);
428
429 return vk_error(VK_ERROR_UNAVAILABLE);
430}
431
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700432VkResult anv_DestroyDevice(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700433 VkDevice _device)
434{
435 struct anv_device *device = (struct anv_device *) _device;
436
Jason Ekstrand7f90e562015-06-04 09:06:59 -0700437 /* FIXME: We should make device destruction actually safe. */
438 return VK_UNSUPPORTED;
439
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700440 anv_compiler_destroy(device->compiler);
441
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700442 anv_queue_finish(&device->queue);
443
Jason Ekstrand3a38b0d2015-06-09 11:08:51 -0700444 anv_device_finish_meta(device);
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700445
Jason Ekstrand38f5eef2015-06-09 11:41:31 -0700446#ifdef HAVE_VALGRIND
447 /* We only need to free these to prevent valgrind errors. The backing
448 * BO will go away in a couple of lines so we don't actually leak.
449 */
450 anv_state_pool_free(&device->dynamic_state_pool,
451 device->float_border_colors);
452 anv_state_pool_free(&device->dynamic_state_pool,
453 device->uint32_border_colors);
454#endif
455
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700456 anv_bo_pool_finish(&device->batch_bo_pool);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700457 anv_block_pool_finish(&device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700458 anv_block_pool_finish(&device->instruction_block_pool);
459 anv_block_pool_finish(&device->surface_state_block_pool);
460
461 close(device->fd);
462
463 if (device->aub_writer)
464 anv_aub_writer_destroy(device->aub_writer);
465
466 anv_device_free(device, device);
467
468 return VK_SUCCESS;
469}
470
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700471VkResult anv_GetGlobalExtensionInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700472 VkExtensionInfoType infoType,
473 uint32_t extensionIndex,
474 size_t* pDataSize,
475 void* pData)
476{
Kristian Høgsberga29df712015-05-15 22:04:52 -0700477 static const VkExtensionProperties extensions[] = {
478 {
479 .extName = "VK_WSI_LunarG",
480 .version = 3
481 }
482 };
483 uint32_t count = ARRAY_SIZE(extensions);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700484
485 switch (infoType) {
486 case VK_EXTENSION_INFO_TYPE_COUNT:
Kristian Høgsberga29df712015-05-15 22:04:52 -0700487 memcpy(pData, &count, sizeof(count));
488 *pDataSize = sizeof(count);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700489 return VK_SUCCESS;
Kristian Høgsberga29df712015-05-15 22:04:52 -0700490
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700491 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
Kristian Høgsberga29df712015-05-15 22:04:52 -0700492 if (extensionIndex >= count)
493 return vk_error(VK_ERROR_INVALID_EXTENSION);
494
495 memcpy(pData, &extensions[extensionIndex], sizeof(extensions[0]));
496 *pDataSize = sizeof(extensions[0]);
497 return VK_SUCCESS;
498
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700499 default:
500 return VK_UNSUPPORTED;
501 }
502}
503
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700504VkResult anv_GetPhysicalDeviceExtensionInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700505 VkPhysicalDevice physicalDevice,
506 VkExtensionInfoType infoType,
507 uint32_t extensionIndex,
508 size_t* pDataSize,
509 void* pData)
510{
511 uint32_t *count;
512
513 switch (infoType) {
514 case VK_EXTENSION_INFO_TYPE_COUNT:
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700515 *pDataSize = 4;
516 if (pData == NULL)
517 return VK_SUCCESS;
518
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700519 count = pData;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700520 *count = 0;
521 return VK_SUCCESS;
522
523 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
524 return vk_error(VK_ERROR_INVALID_EXTENSION);
525
526 default:
527 return VK_UNSUPPORTED;
528 }
529}
530
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700531VkResult anv_EnumerateLayers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700532 VkPhysicalDevice physicalDevice,
533 size_t maxStringSize,
534 size_t* pLayerCount,
535 char* const* pOutLayers,
536 void* pReserved)
537{
538 *pLayerCount = 0;
539
540 return VK_SUCCESS;
541}
542
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700543VkResult anv_GetDeviceQueue(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700544 VkDevice _device,
545 uint32_t queueNodeIndex,
546 uint32_t queueIndex,
547 VkQueue* pQueue)
548{
549 struct anv_device *device = (struct anv_device *) _device;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700550
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700551 assert(queueIndex == 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700552
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700553 *pQueue = (VkQueue) &device->queue;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700554
555 return VK_SUCCESS;
556}
557
Jason Ekstrand59def432015-05-27 11:41:28 -0700558VkResult
Jason Ekstrand403266b2015-05-25 17:38:15 -0700559anv_reloc_list_init(struct anv_reloc_list *list, struct anv_device *device)
560{
561 list->num_relocs = 0;
562 list->array_length = 256;
563 list->relocs =
564 anv_device_alloc(device, list->array_length * sizeof(*list->relocs), 8,
565 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
566
567 if (list->relocs == NULL)
568 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
569
570 list->reloc_bos =
571 anv_device_alloc(device, list->array_length * sizeof(*list->reloc_bos), 8,
572 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
573
574 if (list->relocs == NULL) {
575 anv_device_free(device, list->relocs);
576 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
577 }
578
579 return VK_SUCCESS;
580}
581
Jason Ekstrand59def432015-05-27 11:41:28 -0700582void
Jason Ekstrand403266b2015-05-25 17:38:15 -0700583anv_reloc_list_finish(struct anv_reloc_list *list, struct anv_device *device)
584{
585 anv_device_free(device, list->relocs);
586 anv_device_free(device, list->reloc_bos);
587}
588
589static VkResult
590anv_reloc_list_grow(struct anv_reloc_list *list, struct anv_device *device,
591 size_t num_additional_relocs)
592{
593 if (list->num_relocs + num_additional_relocs <= list->array_length)
594 return VK_SUCCESS;
595
596 size_t new_length = list->array_length * 2;
597 while (new_length < list->num_relocs + num_additional_relocs)
598 new_length *= 2;
599
600 struct drm_i915_gem_relocation_entry *new_relocs =
601 anv_device_alloc(device, new_length * sizeof(*list->relocs), 8,
602 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
603 if (new_relocs == NULL)
604 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
605
606 struct anv_bo **new_reloc_bos =
607 anv_device_alloc(device, new_length * sizeof(*list->reloc_bos), 8,
608 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
609 if (new_relocs == NULL) {
610 anv_device_free(device, new_relocs);
611 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
612 }
613
614 memcpy(new_relocs, list->relocs, list->num_relocs * sizeof(*list->relocs));
615 memcpy(new_reloc_bos, list->reloc_bos,
616 list->num_relocs * sizeof(*list->reloc_bos));
617
618 anv_device_free(device, list->relocs);
619 anv_device_free(device, list->reloc_bos);
620
621 list->relocs = new_relocs;
622 list->reloc_bos = new_reloc_bos;
623
624 return VK_SUCCESS;
625}
626
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700627static VkResult
628anv_batch_bo_create(struct anv_device *device, struct anv_batch_bo **bbo_out)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700629{
630 VkResult result;
631
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700632 struct anv_batch_bo *bbo =
633 anv_device_alloc(device, sizeof(*bbo), 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
634 if (bbo == NULL)
635 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700636
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700637 bbo->num_relocs = 0;
638 bbo->prev_batch_bo = NULL;
639
640 result = anv_bo_pool_alloc(&device->batch_bo_pool, &bbo->bo);
Jason Ekstrand403266b2015-05-25 17:38:15 -0700641 if (result != VK_SUCCESS) {
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700642 anv_device_free(device, bbo);
Jason Ekstrand403266b2015-05-25 17:38:15 -0700643 return result;
644 }
645
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700646 *bbo_out = bbo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700647
648 return VK_SUCCESS;
649}
650
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700651static void
652anv_batch_bo_start(struct anv_batch_bo *bbo, struct anv_batch *batch,
653 size_t batch_padding)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700654{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700655 batch->next = batch->start = bbo->bo.map;
656 batch->end = bbo->bo.map + bbo->bo.size - batch_padding;
657 bbo->first_reloc = batch->relocs.num_relocs;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700658}
659
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700660static void
661anv_batch_bo_finish(struct anv_batch_bo *bbo, struct anv_batch *batch)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700662{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700663 assert(batch->start == bbo->bo.map);
664 bbo->length = batch->next - batch->start;
665 bbo->num_relocs = batch->relocs.num_relocs - bbo->first_reloc;
666}
667
668static void
669anv_batch_bo_destroy(struct anv_batch_bo *bbo, struct anv_device *device)
670{
671 anv_bo_pool_free(&device->batch_bo_pool, &bbo->bo);
672 anv_device_free(device, bbo);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700673}
674
675void *
676anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords)
677{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700678 if (batch->next + num_dwords * 4 > batch->end)
679 batch->extend_cb(batch, batch->user_data);
680
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700681 void *p = batch->next;
682
683 batch->next += num_dwords * 4;
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700684 assert(batch->next <= batch->end);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700685
686 return p;
687}
688
/* Append all of OTHER's relocations to LIST, rebasing each entry's
 * batch offset by OFFSET (the position in the destination batch where
 * the copied commands now live).
 */
static void
anv_reloc_list_append(struct anv_reloc_list *list, struct anv_device *device,
                      struct anv_reloc_list *other, uint32_t offset)
{
   anv_reloc_list_grow(list, device, other->num_relocs);
   /* TODO: Handle failure */

   memcpy(&list->relocs[list->num_relocs], &other->relocs[0],
          other->num_relocs * sizeof(other->relocs[0]));
   memcpy(&list->reloc_bos[list->num_relocs], &other->reloc_bos[0],
          other->num_relocs * sizeof(other->reloc_bos[0]));

   /* Rebase only the freshly copied entries; num_relocs is still the
    * old count here, so it indexes the start of the copied range.
    */
   for (uint32_t i = 0; i < other->num_relocs; i++)
      list->relocs[i + list->num_relocs].offset += offset;

   list->num_relocs += other->num_relocs;
}
706
707static uint64_t
Jason Ekstrand403266b2015-05-25 17:38:15 -0700708anv_reloc_list_add(struct anv_reloc_list *list, struct anv_device *device,
709 uint32_t offset, struct anv_bo *target_bo, uint32_t delta)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700710{
711 struct drm_i915_gem_relocation_entry *entry;
712 int index;
713
Jason Ekstrand403266b2015-05-25 17:38:15 -0700714 anv_reloc_list_grow(list, device, 1);
715 /* TODO: Handle failure */
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700716
717 /* XXX: Can we use I915_EXEC_HANDLE_LUT? */
718 index = list->num_relocs++;
719 list->reloc_bos[index] = target_bo;
720 entry = &list->relocs[index];
721 entry->target_handle = target_bo->gem_handle;
722 entry->delta = delta;
723 entry->offset = offset;
724 entry->presumed_offset = target_bo->offset;
725 entry->read_domains = 0;
726 entry->write_domain = 0;
727
728 return target_bo->offset + delta;
729}
730
/* Copy the contents of OTHER (commands and relocations) into BATCH,
 * growing BATCH via its extend callback if necessary.
 */
void
anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other)
{
   uint32_t size, offset;

   size = other->next - other->start;
   assert(size % 4 == 0);   /* batches are dword-aligned */

   if (batch->next + size > batch->end)
      batch->extend_cb(batch, batch->user_data);

   assert(batch->next + size <= batch->end);

   memcpy(batch->next, other->start, size);

   /* Rebase OTHER's relocations to where its bytes landed in BATCH. */
   offset = batch->next - batch->start;
   anv_reloc_list_append(&batch->relocs, batch->device,
                         &other->relocs, offset);

   batch->next += size;
}
752
753uint64_t
754anv_batch_emit_reloc(struct anv_batch *batch,
755 void *location, struct anv_bo *bo, uint32_t delta)
756{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700757 return anv_reloc_list_add(&batch->relocs, batch->device,
758 location - batch->start, bo, delta);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700759}
760
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700761VkResult anv_QueueSubmit(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700762 VkQueue _queue,
763 uint32_t cmdBufferCount,
764 const VkCmdBuffer* pCmdBuffers,
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700765 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700766{
767 struct anv_queue *queue = (struct anv_queue *) _queue;
768 struct anv_device *device = queue->device;
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700769 struct anv_fence *fence = (struct anv_fence *) _fence;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700770 int ret;
771
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700772 for (uint32_t i = 0; i < cmdBufferCount; i++) {
773 struct anv_cmd_buffer *cmd_buffer =
774 (struct anv_cmd_buffer *) pCmdBuffers[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700775
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700776 if (device->dump_aub)
777 anv_cmd_buffer_dump(cmd_buffer);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700778
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700779 if (!device->no_hw) {
780 ret = anv_gem_execbuffer(device, &cmd_buffer->execbuf);
781 if (ret != 0)
Kristian Høgsberg2b7a0602015-05-12 14:38:58 -0700782 return vk_error(VK_ERROR_UNKNOWN);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700783
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700784 if (fence) {
785 ret = anv_gem_execbuffer(device, &fence->execbuf);
786 if (ret != 0)
787 return vk_error(VK_ERROR_UNKNOWN);
788 }
789
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700790 for (uint32_t i = 0; i < cmd_buffer->bo_count; i++)
791 cmd_buffer->exec2_bos[i]->offset = cmd_buffer->exec2_objects[i].offset;
792 } else {
793 *(uint32_t *)queue->completed_serial.map = cmd_buffer->serial;
794 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700795 }
796
797 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700798}
799
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700800VkResult anv_QueueAddMemReferences(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700801 VkQueue queue,
802 uint32_t count,
803 const VkDeviceMemory* pMems)
804{
805 return VK_SUCCESS;
806}
807
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700808VkResult anv_QueueRemoveMemReferences(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700809 VkQueue queue,
810 uint32_t count,
811 const VkDeviceMemory* pMems)
812{
813 return VK_SUCCESS;
814}
815
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700816VkResult anv_QueueWaitIdle(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700817 VkQueue _queue)
818{
819 struct anv_queue *queue = (struct anv_queue *) _queue;
820
821 return vkDeviceWaitIdle((VkDevice) queue->device);
822}
823
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700824VkResult anv_DeviceWaitIdle(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700825 VkDevice _device)
826{
827 struct anv_device *device = (struct anv_device *) _device;
828 struct anv_state state;
829 struct anv_batch batch;
830 struct drm_i915_gem_execbuffer2 execbuf;
831 struct drm_i915_gem_exec_object2 exec2_objects[1];
832 struct anv_bo *bo = NULL;
833 VkResult result;
834 int64_t timeout;
835 int ret;
836
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700837 state = anv_state_pool_alloc(&device->dynamic_state_pool, 32, 32);
838 bo = &device->dynamic_state_pool.block_pool->bo;
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700839 batch.start = batch.next = state.map;
840 batch.end = state.map + 32;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700841 anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
842 anv_batch_emit(&batch, GEN8_MI_NOOP);
843
844 exec2_objects[0].handle = bo->gem_handle;
845 exec2_objects[0].relocation_count = 0;
846 exec2_objects[0].relocs_ptr = 0;
847 exec2_objects[0].alignment = 0;
848 exec2_objects[0].offset = bo->offset;
849 exec2_objects[0].flags = 0;
850 exec2_objects[0].rsvd1 = 0;
851 exec2_objects[0].rsvd2 = 0;
852
853 execbuf.buffers_ptr = (uintptr_t) exec2_objects;
854 execbuf.buffer_count = 1;
855 execbuf.batch_start_offset = state.offset;
856 execbuf.batch_len = batch.next - state.map;
857 execbuf.cliprects_ptr = 0;
858 execbuf.num_cliprects = 0;
859 execbuf.DR1 = 0;
860 execbuf.DR4 = 0;
861
862 execbuf.flags =
863 I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
864 execbuf.rsvd1 = device->context_id;
865 execbuf.rsvd2 = 0;
866
867 if (!device->no_hw) {
868 ret = anv_gem_execbuffer(device, &execbuf);
869 if (ret != 0) {
870 result = vk_error(VK_ERROR_UNKNOWN);
871 goto fail;
872 }
873
874 timeout = INT64_MAX;
875 ret = anv_gem_wait(device, bo->gem_handle, &timeout);
876 if (ret != 0) {
877 result = vk_error(VK_ERROR_UNKNOWN);
878 goto fail;
879 }
880 }
881
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700882 anv_state_pool_free(&device->dynamic_state_pool, state);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700883
884 return VK_SUCCESS;
885
886 fail:
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700887 anv_state_pool_free(&device->dynamic_state_pool, state);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700888
889 return result;
890}
891
892void *
893anv_device_alloc(struct anv_device * device,
894 size_t size,
895 size_t alignment,
896 VkSystemAllocType allocType)
897{
898 return device->instance->pfnAlloc(device->instance->pAllocUserData,
899 size,
900 alignment,
901 allocType);
902}
903
904void
905anv_device_free(struct anv_device * device,
906 void * mem)
907{
908 return device->instance->pfnFree(device->instance->pAllocUserData,
909 mem);
910}
911
912VkResult
913anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
914{
915 bo->gem_handle = anv_gem_create(device, size);
916 if (!bo->gem_handle)
917 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
918
919 bo->map = NULL;
920 bo->index = 0;
921 bo->offset = 0;
922 bo->size = size;
923
924 return VK_SUCCESS;
925}
926
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700927VkResult anv_AllocMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700928 VkDevice _device,
929 const VkMemoryAllocInfo* pAllocInfo,
930 VkDeviceMemory* pMem)
931{
932 struct anv_device *device = (struct anv_device *) _device;
933 struct anv_device_memory *mem;
934 VkResult result;
935
936 assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);
937
938 mem = anv_device_alloc(device, sizeof(*mem), 8,
939 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
940 if (mem == NULL)
941 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
942
943 result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
944 if (result != VK_SUCCESS)
945 goto fail;
946
947 *pMem = (VkDeviceMemory) mem;
948
949 return VK_SUCCESS;
950
951 fail:
952 anv_device_free(device, mem);
953
954 return result;
955}
956
VkResult anv_FreeMemory(
    VkDevice                                    _device,
    VkDeviceMemory                              _mem)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_device_memory *mem = (struct anv_device_memory *) _mem;

   /* Tear down in reverse order of setup: CPU mapping, then the GEM
    * handle, then the host-side wrapper.
    *
    * NOTE(review): anv_MapMemory() records its mapping in mem->map, not
    * mem->bo.map (which anv_bo_init_new() sets to NULL), so a range that
    * is still mapped at free time may not be unmapped here — confirm
    * against anv_MapMemory()/anv_UnmapMemory(). */
   if (mem->bo.map)
      anv_gem_munmap(mem->bo.map, mem->bo.size);

   if (mem->bo.gem_handle != 0)
      anv_gem_close(device, mem->bo.gem_handle);

   anv_device_free(device, mem);

   return VK_SUCCESS;
}
974
VkResult anv_SetMemoryPriority(
    VkDevice                                    device,
    VkDeviceMemory                              mem,
    VkMemoryPriority                            priority)
{
   /* Intentional no-op: memory priorities are hints and this driver
    * ignores them. */
   return VK_SUCCESS;
}
982
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700983VkResult anv_MapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700984 VkDevice _device,
985 VkDeviceMemory _mem,
986 VkDeviceSize offset,
987 VkDeviceSize size,
988 VkMemoryMapFlags flags,
989 void** ppData)
990{
991 struct anv_device *device = (struct anv_device *) _device;
992 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
993
994 /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
995 * takes a VkDeviceMemory pointer, it seems like only one map of the memory
996 * at a time is valid. We could just mmap up front and return an offset
997 * pointer here, but that may exhaust virtual memory on 32 bit
998 * userspace. */
999
1000 mem->map = anv_gem_mmap(device, mem->bo.gem_handle, offset, size);
1001 mem->map_size = size;
1002
1003 *ppData = mem->map;
1004
1005 return VK_SUCCESS;
1006}
1007
VkResult anv_UnmapMemory(
    VkDevice                                    _device,
    VkDeviceMemory                              _mem)
{
   struct anv_device_memory *mem = (struct anv_device_memory *) _mem;

   /* Drop the CPU mapping recorded by the last anv_MapMemory() call. */
   anv_gem_munmap(mem->map, mem->map_size);

   return VK_SUCCESS;
}
1018
VkResult anv_FlushMappedMemory(
    VkDevice                                    device,
    VkDeviceMemory                              mem,
    VkDeviceSize                                offset,
    VkDeviceSize                                size)
{
   /* clflush here for !llc platforms */
   /* Currently a no-op: nothing is flushed.  NOTE(review): non-LLC
    * platforms will need the clflush mentioned above for coherency. */

   return VK_SUCCESS;
}
1029
VkResult anv_PinSystemMemory(
    VkDevice                                    device,
    const void*                                 pSysMem,
    size_t                                      memSize,
    VkDeviceMemory*                             pMem)
{
   /* Intentional no-op: system-memory pinning is not implemented; the call
    * reports success without producing a memory object. */
   return VK_SUCCESS;
}
1038
VkResult anv_GetMultiDeviceCompatibility(
    VkPhysicalDevice                            physicalDevice0,
    VkPhysicalDevice                            physicalDevice1,
    VkPhysicalDeviceCompatibilityInfo*          pInfo)
{
   /* Multi-device operation is not supported by this driver. */
   return VK_UNSUPPORTED;
}
1046
VkResult anv_OpenSharedMemory(
    VkDevice                                    device,
    const VkMemoryOpenInfo*                     pOpenInfo,
    VkDeviceMemory*                             pMem)
{
   /* Cross-process shared memory is not supported by this driver. */
   return VK_UNSUPPORTED;
}
1054
VkResult anv_OpenSharedSemaphore(
    VkDevice                                    device,
    const VkSemaphoreOpenInfo*                  pOpenInfo,
    VkSemaphore*                                pSemaphore)
{
   /* Shared semaphores are not supported by this driver. */
   return VK_UNSUPPORTED;
}
1062
VkResult anv_OpenPeerMemory(
    VkDevice                                    device,
    const VkPeerMemoryOpenInfo*                 pOpenInfo,
    VkDeviceMemory*                             pMem)
{
   /* Peer (multi-GPU) memory is not supported by this driver. */
   return VK_UNSUPPORTED;
}
1070
VkResult anv_OpenPeerImage(
    VkDevice                                    device,
    const VkPeerImageOpenInfo*                  pOpenInfo,
    VkImage*                                    pImage,
    VkDeviceMemory*                             pMem)
{
   /* Peer (multi-GPU) images are not supported by this driver. */
   return VK_UNSUPPORTED;
}
1079
VkResult anv_DestroyObject(
    VkDevice                                    _device,
    VkObjectType                                objType,
    VkObject                                    _object)
{
   /* Generic object destruction entry point.  Objects fall into four
    * groups: ones with dedicated destroy paths, ones freed with a plain
    * anv_device_free(), ones that carry their own destructor in an
    * anv_object header, and unimplemented ones. */
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_object *object = (struct anv_object *) _object;

   switch (objType) {
   case VK_OBJECT_TYPE_INSTANCE:
      return anv_DestroyInstance((VkInstance) _object);

   case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
      /* We don't want to actually destroy physical devices */
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_DEVICE:
      assert(_device == (VkDevice) _object);
      return anv_DestroyDevice((VkDevice) _object);

   case VK_OBJECT_TYPE_QUEUE:
      /* TODO */
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_DEVICE_MEMORY:
      return anv_FreeMemory(_device, (VkDeviceMemory) _object);

   case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
      /* These are just dummies anyway, so we don't need to destroy them */
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_BUFFER:
   case VK_OBJECT_TYPE_IMAGE:
   case VK_OBJECT_TYPE_DEPTH_STENCIL_VIEW:
   case VK_OBJECT_TYPE_SHADER:
   case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
   case VK_OBJECT_TYPE_SAMPLER:
   case VK_OBJECT_TYPE_DESCRIPTOR_SET:
   case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
   case VK_OBJECT_TYPE_DYNAMIC_RS_STATE:
   case VK_OBJECT_TYPE_DYNAMIC_CB_STATE:
   case VK_OBJECT_TYPE_DYNAMIC_DS_STATE:
   case VK_OBJECT_TYPE_RENDER_PASS:
      /* These are trivially destroyable */
      anv_device_free(device, (void *) _object);
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_COMMAND_BUFFER:
   case VK_OBJECT_TYPE_PIPELINE:
   case VK_OBJECT_TYPE_DYNAMIC_VP_STATE:
   case VK_OBJECT_TYPE_FENCE:
   case VK_OBJECT_TYPE_QUERY_POOL:
   case VK_OBJECT_TYPE_FRAMEBUFFER:
   case VK_OBJECT_TYPE_BUFFER_VIEW:
   case VK_OBJECT_TYPE_IMAGE_VIEW:
   case VK_OBJECT_TYPE_COLOR_ATTACHMENT_VIEW:
      /* These types embed an anv_object header whose destructor knows how
       * to release type-specific resources (e.g. anv_fence_destroy). */
      (object->destructor)(device, object, objType);
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_SEMAPHORE:
   case VK_OBJECT_TYPE_EVENT:
      stub_return(VK_UNSUPPORTED);

   default:
      unreachable("Invalid object type");
   }
}
1147
1148static void
1149fill_memory_requirements(
1150 VkObjectType objType,
1151 VkObject object,
1152 VkMemoryRequirements * memory_requirements)
1153{
1154 struct anv_buffer *buffer;
1155 struct anv_image *image;
1156
1157 memory_requirements->memPropsAllowed =
1158 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1159 VK_MEMORY_PROPERTY_HOST_DEVICE_COHERENT_BIT |
1160 /* VK_MEMORY_PROPERTY_HOST_UNCACHED_BIT | */
1161 VK_MEMORY_PROPERTY_HOST_WRITE_COMBINED_BIT |
1162 VK_MEMORY_PROPERTY_PREFER_HOST_LOCAL |
1163 VK_MEMORY_PROPERTY_SHAREABLE_BIT;
1164
1165 memory_requirements->memPropsRequired = 0;
1166
1167 switch (objType) {
1168 case VK_OBJECT_TYPE_BUFFER:
1169 buffer = (struct anv_buffer *) object;
1170 memory_requirements->size = buffer->size;
1171 memory_requirements->alignment = 16;
1172 break;
1173 case VK_OBJECT_TYPE_IMAGE:
1174 image = (struct anv_image *) object;
1175 memory_requirements->size = image->size;
1176 memory_requirements->alignment = image->alignment;
1177 break;
1178 default:
1179 memory_requirements->size = 0;
1180 break;
1181 }
1182}
1183
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001184static uint32_t
1185get_allocation_count(VkObjectType objType)
1186{
1187 switch (objType) {
1188 case VK_OBJECT_TYPE_BUFFER:
1189 case VK_OBJECT_TYPE_IMAGE:
1190 return 1;
1191 default:
1192 return 0;
1193 }
1194}
1195
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001196VkResult anv_GetObjectInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001197 VkDevice _device,
1198 VkObjectType objType,
1199 VkObject object,
1200 VkObjectInfoType infoType,
1201 size_t* pDataSize,
1202 void* pData)
1203{
1204 VkMemoryRequirements memory_requirements;
Kristian Høgsberg05754542015-05-18 08:50:04 -07001205 uint32_t *count;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001206
1207 switch (infoType) {
1208 case VK_OBJECT_INFO_TYPE_MEMORY_REQUIREMENTS:
Kristian Høgsberg783e6212015-05-17 19:22:52 -07001209 *pDataSize = sizeof(memory_requirements);
1210 if (pData == NULL)
1211 return VK_SUCCESS;
1212
Kristian Høgsberg05754542015-05-18 08:50:04 -07001213 fill_memory_requirements(objType, object, pData);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001214 return VK_SUCCESS;
1215
1216 case VK_OBJECT_INFO_TYPE_MEMORY_ALLOCATION_COUNT:
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001217 *pDataSize = sizeof(count);
1218 if (pData == NULL)
1219 return VK_SUCCESS;
1220
Kristian Høgsberg05754542015-05-18 08:50:04 -07001221 count = pData;
1222 *count = get_allocation_count(objType);
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001223 return VK_SUCCESS;
1224
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001225 default:
1226 return VK_UNSUPPORTED;
1227 }
1228
1229}
1230
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001231VkResult anv_QueueBindObjectMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001232 VkQueue queue,
1233 VkObjectType objType,
1234 VkObject object,
1235 uint32_t allocationIdx,
1236 VkDeviceMemory _mem,
1237 VkDeviceSize memOffset)
1238{
1239 struct anv_buffer *buffer;
1240 struct anv_image *image;
1241 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
1242
1243 switch (objType) {
1244 case VK_OBJECT_TYPE_BUFFER:
1245 buffer = (struct anv_buffer *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001246 buffer->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001247 buffer->offset = memOffset;
1248 break;
1249 case VK_OBJECT_TYPE_IMAGE:
1250 image = (struct anv_image *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001251 image->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001252 image->offset = memOffset;
1253 break;
1254 default:
1255 break;
1256 }
1257
1258 return VK_SUCCESS;
1259}
1260
VkResult anv_QueueBindObjectMemoryRange(
    VkQueue                                     queue,
    VkObjectType                                objType,
    VkObject                                    object,
    uint32_t                                    allocationIdx,
    VkDeviceSize                                rangeOffset,
    VkDeviceSize                                rangeSize,
    VkDeviceMemory                              mem,
    VkDeviceSize                                memOffset)
{
   /* Sparse/range binding is not implemented; report VK_UNSUPPORTED. */
   stub_return(VK_UNSUPPORTED);
}
1273
VkResult anv_QueueBindImageMemoryRange(
    VkQueue                                     queue,
    VkImage                                     image,
    uint32_t                                    allocationIdx,
    const VkImageMemoryBindInfo*                pBindInfo,
    VkDeviceMemory                              mem,
    VkDeviceSize                                memOffset)
{
   /* Sparse image binding is not implemented; report VK_UNSUPPORTED. */
   stub_return(VK_UNSUPPORTED);
}
1284
Jason Ekstrand57153da2015-05-22 15:15:08 -07001285static void
1286anv_fence_destroy(struct anv_device *device,
1287 struct anv_object *object,
1288 VkObjectType obj_type)
1289{
1290 struct anv_fence *fence = (struct anv_fence *) object;
1291
1292 assert(obj_type == VK_OBJECT_TYPE_FENCE);
1293
1294 anv_gem_munmap(fence->bo.map, fence->bo.size);
1295 anv_gem_close(device, fence->bo.gem_handle);
1296 anv_device_free(device, fence);
1297}
1298
VkResult anv_CreateFence(
    VkDevice                                    _device,
    const VkFenceCreateInfo*                    pCreateInfo,
    VkFence*                                    pFence)
{
   /* A fence is implemented as a tiny batch (BATCH_BUFFER_END + NOOP) in
    * its own BO, plus a pre-built execbuf that submits it.  Signaling the
    * fence means executing the batch; waiting means gem-waiting on its BO. */
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_fence *fence;
   struct anv_batch batch;
   VkResult result;

   const uint32_t fence_size = 128;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);

   fence = anv_device_alloc(device, sizeof(*fence), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (fence == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   result = anv_bo_init_new(&fence->bo, device, fence_size);
   if (result != VK_SUCCESS)
      goto fail;

   /* Destroyed through the generic anv_DestroyObject() dispatch. */
   fence->base.destructor = anv_fence_destroy;

   /* Write the batch directly through a CPU mapping of the fence BO.
    * NOTE(review): the anv_gem_mmap() result is not checked — presumably
    * failure is not expected here; confirm. */
   fence->bo.map =
      anv_gem_mmap(device, fence->bo.gem_handle, 0, fence->bo.size);
   batch.next = batch.start = fence->bo.map;
   batch.end = fence->bo.map + fence->bo.size;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   /* Single execbuf object (the fence BO itself); no relocations. */
   fence->exec2_objects[0].handle = fence->bo.gem_handle;
   fence->exec2_objects[0].relocation_count = 0;
   fence->exec2_objects[0].relocs_ptr = 0;
   fence->exec2_objects[0].alignment = 0;
   fence->exec2_objects[0].offset = fence->bo.offset;
   fence->exec2_objects[0].flags = 0;
   fence->exec2_objects[0].rsvd1 = 0;
   fence->exec2_objects[0].rsvd2 = 0;

   /* Pre-build the execbuf so signaling the fence is a single ioctl. */
   fence->execbuf.buffers_ptr = (uintptr_t) fence->exec2_objects;
   fence->execbuf.buffer_count = 1;
   fence->execbuf.batch_start_offset = 0;
   fence->execbuf.batch_len = batch.next - fence->bo.map;
   fence->execbuf.cliprects_ptr = 0;
   fence->execbuf.num_cliprects = 0;
   fence->execbuf.DR1 = 0;
   fence->execbuf.DR4 = 0;

   fence->execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   fence->execbuf.rsvd1 = device->context_id;
   fence->execbuf.rsvd2 = 0;

   *pFence = (VkFence) fence;

   return VK_SUCCESS;

 fail:
   anv_device_free(device, fence);

   return result;
}
1363
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001364VkResult anv_ResetFences(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001365 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001366 uint32_t fenceCount,
1367 VkFence* pFences)
1368{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001369 struct anv_fence **fences = (struct anv_fence **) pFences;
1370
Kristian Høgsberg Kristensen52637c02015-06-05 11:51:30 -07001371 for (uint32_t i = 0; i < fenceCount; i++)
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001372 fences[i]->ready = false;
1373
1374 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001375}
1376
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001377VkResult anv_GetFenceStatus(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001378 VkDevice _device,
1379 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001380{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001381 struct anv_device *device = (struct anv_device *) _device;
1382 struct anv_fence *fence = (struct anv_fence *) _fence;
1383 int64_t t = 0;
1384 int ret;
1385
1386 if (fence->ready)
1387 return VK_SUCCESS;
1388
1389 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
1390 if (ret == 0) {
1391 fence->ready = true;
1392 return VK_SUCCESS;
1393 }
1394
1395 return VK_NOT_READY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001396}
1397
VkResult anv_WaitForFences(
    VkDevice                                    _device,
    uint32_t                                    fenceCount,
    const VkFence*                              pFences,
    bool32_t                                    waitAll,
    uint64_t                                    timeout)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_fence **fences = (struct anv_fence **) pFences;
   int64_t t = timeout;
   int ret;

   /* FIXME: handle !waitAll */
   /* NOTE(review): `t` is passed by reference to every wait, so the i915
    * wait ioctl presumably decrements it with the remaining budget and the
    * timeout is shared across all fences — confirm kernel semantics. */

   for (uint32_t i = 0; i < fenceCount; i++) {
      ret = anv_gem_wait(device, fences[i]->bo.gem_handle, &t);
      if (ret == -1 && errno == ETIME)
         return VK_TIMEOUT;
      else if (ret == -1)
         return vk_error(VK_ERROR_UNKNOWN);
   }

   return VK_SUCCESS;
}
1422
1423// Queue semaphore functions
1424
VkResult anv_CreateSemaphore(
    VkDevice                                    device,
    const VkSemaphoreCreateInfo*                pCreateInfo,
    VkSemaphore*                                pSemaphore)
{
   /* Semaphores are not yet implemented. */
   stub_return(VK_UNSUPPORTED);
}
1432
VkResult anv_QueueSignalSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   /* Semaphores are not yet implemented. */
   stub_return(VK_UNSUPPORTED);
}
1439
VkResult anv_QueueWaitSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   /* Semaphores are not yet implemented. */
   stub_return(VK_UNSUPPORTED);
}
1446
1447// Event functions
1448
VkResult anv_CreateEvent(
    VkDevice                                    device,
    const VkEventCreateInfo*                    pCreateInfo,
    VkEvent*                                    pEvent)
{
   /* Events are not yet implemented. */
   stub_return(VK_UNSUPPORTED);
}
1456
VkResult anv_GetEventStatus(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not yet implemented. */
   stub_return(VK_UNSUPPORTED);
}
1463
VkResult anv_SetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not yet implemented. */
   stub_return(VK_UNSUPPORTED);
}
1470
VkResult anv_ResetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not yet implemented. */
   stub_return(VK_UNSUPPORTED);
}
1477
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001478// Buffer functions
1479
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001480VkResult anv_CreateBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001481 VkDevice _device,
1482 const VkBufferCreateInfo* pCreateInfo,
1483 VkBuffer* pBuffer)
1484{
1485 struct anv_device *device = (struct anv_device *) _device;
1486 struct anv_buffer *buffer;
1487
1488 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
1489
1490 buffer = anv_device_alloc(device, sizeof(*buffer), 8,
1491 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1492 if (buffer == NULL)
1493 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1494
1495 buffer->size = pCreateInfo->size;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001496 buffer->bo = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001497 buffer->offset = 0;
1498
1499 *pBuffer = (VkBuffer) buffer;
1500
1501 return VK_SUCCESS;
1502}
1503
1504// Buffer view functions
1505
/* Pack a GEN8 RENDER_SURFACE_STATE describing a linear buffer surface of
 * `range` bytes at `offset` into `state` (caller-provided, 64-byte state).
 * NOTE(review): the element stride is hard-coded to 4 regardless of
 * `format` (see comment below) — only 4-byte-per-element formats are
 * described correctly. */
static void
fill_buffer_surface_state(void *state, VkFormat format,
                          uint32_t offset, uint32_t range)
{
   const struct anv_format *info;

   info = anv_format_for_vk_format(format);
   /* This assumes RGBA float format. */
   uint32_t stride = 4;
   uint32_t num_elements = range / stride;

   struct GEN8_RENDER_SURFACE_STATE surface_state = {
      .SurfaceType = SURFTYPE_BUFFER,
      .SurfaceArray = false,
      .SurfaceFormat = info->format,
      .SurfaceVerticalAlignment = VALIGN4,
      .SurfaceHorizontalAlignment = HALIGN4,
      .TileMode = LINEAR,
      .VerticalLineStride = 0,
      .VerticalLineStrideOffset = 0,
      .SamplerL2BypassModeDisable = true,
      .RenderCacheReadWriteMode = WriteOnlyCache,
      .MemoryObjectControlState = GEN8_MOCS,
      .BaseMipLevel = 0,
      .SurfaceQPitch = 0,
      /* For buffer surfaces the element count is split across the
       * Width/Height/Depth fields: bits 6:0, 20:7 and 26:21. */
      .Height = (num_elements >> 7) & 0x3fff,
      .Width = num_elements & 0x7f,
      .Depth = (num_elements >> 21) & 0x3f,
      .SurfacePitch = stride - 1,
      .MinimumArrayElement = 0,
      .NumberofMultisamples = MULTISAMPLECOUNT_1,
      .XOffset = 0,
      .YOffset = 0,
      .SurfaceMinLOD = 0,
      .MIPCountLOD = 0,
      .AuxiliarySurfaceMode = AUX_NONE,
      .RedClearColor = 0,
      .GreenClearColor = 0,
      .BlueClearColor = 0,
      .AlphaClearColor = 0,
      .ShaderChannelSelectRed = SCS_RED,
      .ShaderChannelSelectGreen = SCS_GREEN,
      .ShaderChannelSelectBlue = SCS_BLUE,
      .ShaderChannelSelectAlpha = SCS_ALPHA,
      .ResourceMinLOD = 0,
      /* FIXME: We assume that the image must be bound at this time. */
      .SurfaceBaseAddress = { NULL, offset },
   };

   GEN8_RENDER_SURFACE_STATE_pack(NULL, state, &surface_state);
}
1557
1558VkResult anv_CreateBufferView(
1559 VkDevice _device,
1560 const VkBufferViewCreateInfo* pCreateInfo,
1561 VkBufferView* pView)
1562{
1563 struct anv_device *device = (struct anv_device *) _device;
1564 struct anv_buffer *buffer = (struct anv_buffer *) pCreateInfo->buffer;
1565 struct anv_surface_view *view;
1566
1567 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);
1568
1569 view = anv_device_alloc(device, sizeof(*view), 8,
1570 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1571 if (view == NULL)
1572 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1573
Jason Ekstrand9d6f55d2015-06-09 11:08:03 -07001574 view->base.destructor = anv_surface_view_destroy;
1575
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001576 view->bo = buffer->bo;
1577 view->offset = buffer->offset + pCreateInfo->offset;
1578 view->surface_state =
1579 anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
1580 view->format = pCreateInfo->format;
1581 view->range = pCreateInfo->range;
1582
1583 fill_buffer_surface_state(view->surface_state.map,
1584 pCreateInfo->format, view->offset, pCreateInfo->range);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001585
Chad Versace87d98e12015-06-04 14:31:53 -07001586 *pView = (VkBufferView) view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001587
1588 return VK_SUCCESS;
1589}
1590
1591// Sampler functions
1592
VkResult anv_CreateSampler(
    VkDevice                                    _device,
    const VkSamplerCreateInfo*                  pCreateInfo,
    VkSampler*                                  pSampler)
{
   /* Translate the Vulkan sampler description into a packed GEN8
    * SAMPLER_STATE stored inline in the anv_sampler. */
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_sampler *sampler;
   uint32_t mag_filter, min_filter, max_anisotropy;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);

   sampler = anv_device_alloc(device, sizeof(*sampler), 8,
                              VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!sampler)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* Vulkan-enum -> GEN8-field translation tables. */
   static const uint32_t vk_to_gen_tex_filter[] = {
      [VK_TEX_FILTER_NEAREST] = MAPFILTER_NEAREST,
      [VK_TEX_FILTER_LINEAR] = MAPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_mipmap_mode[] = {
      [VK_TEX_MIPMAP_MODE_BASE] = MIPFILTER_NONE,
      [VK_TEX_MIPMAP_MODE_NEAREST] = MIPFILTER_NEAREST,
      [VK_TEX_MIPMAP_MODE_LINEAR] = MIPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_tex_address[] = {
      [VK_TEX_ADDRESS_WRAP] = TCM_WRAP,
      [VK_TEX_ADDRESS_MIRROR] = TCM_MIRROR,
      [VK_TEX_ADDRESS_CLAMP] = TCM_CLAMP,
      [VK_TEX_ADDRESS_MIRROR_ONCE] = TCM_MIRROR_ONCE,
      [VK_TEX_ADDRESS_CLAMP_BORDER] = TCM_CLAMP_BORDER,
   };

   static const uint32_t vk_to_gen_compare_op[] = {
      [VK_COMPARE_OP_NEVER] = PREFILTEROPNEVER,
      [VK_COMPARE_OP_LESS] = PREFILTEROPLESS,
      [VK_COMPARE_OP_EQUAL] = PREFILTEROPEQUAL,
      [VK_COMPARE_OP_LESS_EQUAL] = PREFILTEROPLEQUAL,
      [VK_COMPARE_OP_GREATER] = PREFILTEROPGREATER,
      [VK_COMPARE_OP_NOT_EQUAL] = PREFILTEROPNOTEQUAL,
      [VK_COMPARE_OP_GREATER_EQUAL] = PREFILTEROPGEQUAL,
      [VK_COMPARE_OP_ALWAYS] = PREFILTEROPALWAYS,
   };

   /* Anisotropic filtering overrides the plain mag/min filters; the
    * MaximumAnisotropy field encodes ratio 2(x+1):1, hence (max-2)/2. */
   if (pCreateInfo->maxAnisotropy > 1) {
      mag_filter = MAPFILTER_ANISOTROPIC;
      min_filter = MAPFILTER_ANISOTROPIC;
      max_anisotropy = (pCreateInfo->maxAnisotropy - 2) / 2;
   } else {
      mag_filter = vk_to_gen_tex_filter[pCreateInfo->magFilter];
      min_filter = vk_to_gen_tex_filter[pCreateInfo->minFilter];
      max_anisotropy = RATIO21;
   }

   struct GEN8_SAMPLER_STATE sampler_state = {
      .SamplerDisable = false,
      .TextureBorderColorMode = DX10OGL,
      .LODPreClampMode = 0,
      .BaseMipLevel = 0,
      .MipModeFilter = vk_to_gen_mipmap_mode[pCreateInfo->mipMode],
      .MagModeFilter = mag_filter,
      .MinModeFilter = min_filter,
      /* LOD fields are unsigned 4.8 fixed point, hence the * 256. */
      .TextureLODBias = pCreateInfo->mipLodBias * 256,
      .AnisotropicAlgorithm = EWAApproximation,
      .MinLOD = pCreateInfo->minLod * 256,
      .MaxLOD = pCreateInfo->maxLod * 256,
      .ChromaKeyEnable = 0,
      .ChromaKeyIndex = 0,
      .ChromaKeyMode = 0,
      .ShadowFunction = vk_to_gen_compare_op[pCreateInfo->compareOp],
      .CubeSurfaceControlMode = 0,

      /* Border color is indexed into the device's pre-uploaded table of
       * float border colors (4 floats per entry). */
      .IndirectStatePointer =
         device->float_border_colors.offset +
         pCreateInfo->borderColor * sizeof(float) * 4,

      .LODClampMagnificationMode = MIPNONE,
      .MaximumAnisotropy = max_anisotropy,
      .RAddressMinFilterRoundingEnable = 0,
      .RAddressMagFilterRoundingEnable = 0,
      .VAddressMinFilterRoundingEnable = 0,
      .VAddressMagFilterRoundingEnable = 0,
      .UAddressMinFilterRoundingEnable = 0,
      .UAddressMagFilterRoundingEnable = 0,
      .TrilinearFilterQuality = 0,
      .NonnormalizedCoordinateEnable = 0,
      .TCXAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressU],
      .TCYAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressV],
      .TCZAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressW],
   };

   GEN8_SAMPLER_STATE_pack(NULL, sampler->state, &sampler_state);

   *pSampler = (VkSampler) sampler;

   return VK_SUCCESS;
}
1692
1693// Descriptor set functions
1694
/* Create a descriptor set layout.
 *
 * The layout is built in two passes over the bindings.  Pass 1 counts, per
 * shader stage, how many sampler slots and surface slots the bindings
 * consume (a combined image/sampler consumes one of each), plus the total
 * number of dynamic buffers and descriptors.  Pass 2 carves the layout's
 * trailing entries[] array into per-stage surface/sampler sub-arrays and
 * fills each slot with its descriptor index and dynamic-buffer slot.
 */
VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayout*                      pSetLayout)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t sampler_count[VK_NUM_SHADER_STAGE] = { 0, };
   uint32_t surface_count[VK_NUM_SHADER_STAGE] = { 0, };
   uint32_t num_dynamic_buffers = 0;
   uint32_t count = 0;
   uint32_t stages = 0;
   uint32_t s;

   /* Pass 1: count per-stage sampler/surface slots, dynamic buffers and
    * total descriptors.
    */
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      /* Samplers (and the sampler half of combined image/samplers) take a
       * sampler slot in every stage the binding is visible to.
       */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            sampler_count[s] += pCreateInfo->pBinding[i].count;
         break;
      default:
         break;
      }

      /* Everything backed by a surface (images, buffers, texel buffers)
       * takes a surface slot per stage.  Note COMBINED_IMAGE_SAMPLER
       * appears in both this switch and the one above.
       */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            surface_count[s] += pCreateInfo->pBinding[i].count;
         break;
      default:
         break;
      }

      /* Dynamic buffers additionally consume dynamic-offset slots. */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         num_dynamic_buffers += pCreateInfo->pBinding[i].count;
         break;
      default:
         break;
      }

      stages |= pCreateInfo->pBinding[i].stageFlags;
      count += pCreateInfo->pBinding[i].count;
   }

   uint32_t sampler_total = 0;
   uint32_t surface_total = 0;
   for (uint32_t s = 0; s < VK_NUM_SHADER_STAGE; s++) {
      sampler_total += sampler_count[s];
      surface_total += surface_count[s];
   }

   /* Allocate the layout with a trailing entries[] array big enough for
    * every per-stage sampler and surface slot together.
    */
   size_t size = sizeof(*set_layout) +
      (sampler_total + surface_total) * sizeof(set_layout->entries[0]);
   set_layout = anv_device_alloc(device, size, 8,
                                 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!set_layout)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->num_dynamic_buffers = num_dynamic_buffers;
   set_layout->count = count;
   set_layout->shader_stages = stages;

   /* Carve entries[] into per-stage sub-arrays: for each stage, surfaces
    * first, then samplers.  The surface[]/sampler[] cursors below walk
    * these sub-arrays during pass 2.
    */
   struct anv_descriptor_slot *p = set_layout->entries;
   struct anv_descriptor_slot *sampler[VK_NUM_SHADER_STAGE];
   struct anv_descriptor_slot *surface[VK_NUM_SHADER_STAGE];
   for (uint32_t s = 0; s < VK_NUM_SHADER_STAGE; s++) {
      set_layout->stage[s].surface_count = surface_count[s];
      set_layout->stage[s].surface_start = surface[s] = p;
      p += surface_count[s];
      set_layout->stage[s].sampler_count = sampler_count[s];
      set_layout->stage[s].sampler_start = sampler[s] = p;
      p += sampler_count[s];
   }

   /* Pass 2: fill in every slot's flat descriptor index and, for dynamic
    * buffers, its dynamic-offset slot (-1 means "not dynamic").
    */
   uint32_t descriptor = 0;
   int8_t dynamic_slot = 0;
   bool is_dynamic;
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].count; j++) {
               sampler[s]->index = descriptor + j;
               sampler[s]->dynamic_slot = -1;
               sampler[s]++;
            }
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         is_dynamic = true;
         break;
      default:
         is_dynamic = false;
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].count; j++) {
               surface[s]->index = descriptor + j;
               if (is_dynamic)
                  surface[s]->dynamic_slot = dynamic_slot + j;
               else
                  surface[s]->dynamic_slot = -1;
               surface[s]++;
            }
         break;
      default:
         break;
      }

      /* Dynamic slots are numbered across all stages by binding order. */
      if (is_dynamic)
         dynamic_slot += pCreateInfo->pBinding[i].count;

      descriptor += pCreateInfo->pBinding[i].count;
   }

   *pSetLayout = (VkDescriptorSetLayout) set_layout;

   return VK_SUCCESS;
}
1845
/* No-op: this driver needs no preparation before descriptor updates. */
VkResult anv_BeginDescriptorPoolUpdate(
    VkDevice                                    device,
    VkDescriptorUpdateMode                      updateMode)
{
   return VK_SUCCESS;
}
1852
/* No-op: descriptor updates take effect immediately, so there is nothing
 * to flush at end-of-update.
 */
VkResult anv_EndDescriptorPoolUpdate(
    VkDevice                                    device,
    VkCmdBuffer                                 cmd)
{
   return VK_SUCCESS;
}
1859
/* Descriptor pools are not tracked by this driver; sets are allocated
 * straight from the device.  Return a dummy non-zero handle so callers
 * receive a "valid" pool.
 */
VkResult anv_CreateDescriptorPool(
    VkDevice                                    device,
    VkDescriptorPoolUsage                       poolUsage,
    uint32_t                                    maxSets,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    VkDescriptorPool*                           pDescriptorPool)
{
   *pDescriptorPool = 1;

   return VK_SUCCESS;
}
1871
/* No-op: pools are dummy handles (see anv_CreateDescriptorPool), so there
 * is no per-pool state to reset.
 */
VkResult anv_ResetDescriptorPool(
    VkDevice                                    device,
    VkDescriptorPool                            descriptorPool)
{
   return VK_SUCCESS;
}
1878
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001879VkResult anv_AllocDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001880 VkDevice _device,
1881 VkDescriptorPool descriptorPool,
1882 VkDescriptorSetUsage setUsage,
1883 uint32_t count,
1884 const VkDescriptorSetLayout* pSetLayouts,
1885 VkDescriptorSet* pDescriptorSets,
1886 uint32_t* pCount)
1887{
1888 struct anv_device *device = (struct anv_device *) _device;
1889 const struct anv_descriptor_set_layout *layout;
1890 struct anv_descriptor_set *set;
1891 size_t size;
1892
1893 for (uint32_t i = 0; i < count; i++) {
1894 layout = (struct anv_descriptor_set_layout *) pSetLayouts[i];
Kristian Høgsberga77229c2015-05-13 11:49:30 -07001895 size = sizeof(*set) + layout->count * sizeof(set->descriptors[0]);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001896 set = anv_device_alloc(device, size, 8,
1897 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1898 if (!set) {
1899 *pCount = i;
1900 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1901 }
1902
Jason Ekstrand0a547512015-05-21 16:33:04 -07001903 /* Descriptor sets may not be 100% filled out so we need to memset to
1904 * ensure that we can properly detect and handle holes.
1905 */
1906 memset(set, 0, size);
1907
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001908 pDescriptorSets[i] = (VkDescriptorSet) set;
1909 }
1910
1911 *pCount = count;
1912
Kristian Høgsbergb4b3bd12015-05-17 18:39:12 -07001913 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001914}
1915
/* No-op: with untracked pools there is nothing to clear; the sets
 * themselves are freed through the generic object-destroy path.
 */
void anv_ClearDescriptorSets(
    VkDevice                                    device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
}
1923
/* Apply a batch of descriptor updates to one descriptor set.
 *
 * Each entry of ppUpdateArray is a Vk*Update struct discriminated by its
 * sType.  Updates write directly into set->descriptors[binding + j]; each
 * slot holds a view pointer and/or a sampler pointer depending on the
 * update kind.  Unknown sTypes and AS_COPY updates are silently ignored.
 */
void anv_UpdateDescriptors(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    uint32_t                                    updateCount,
    const void**                                ppUpdateArray)
{
   struct anv_descriptor_set *set = (struct anv_descriptor_set *) descriptorSet;
   VkUpdateSamplers *update_samplers;
   VkUpdateSamplerTextures *update_sampler_textures;
   VkUpdateImages *update_images;
   VkUpdateBuffers *update_buffers;
   VkUpdateAsCopy *update_as_copy;

   for (uint32_t i = 0; i < updateCount; i++) {
      const struct anv_common *common = ppUpdateArray[i];

      switch (common->sType) {
      case VK_STRUCTURE_TYPE_UPDATE_SAMPLERS:
         update_samplers = (VkUpdateSamplers *) common;

         /* Samplers only: leave the view half of each slot untouched. */
         for (uint32_t j = 0; j < update_samplers->count; j++) {
            set->descriptors[update_samplers->binding + j].sampler =
               (struct anv_sampler *) update_samplers->pSamplers[j];
         }
         break;

      case VK_STRUCTURE_TYPE_UPDATE_SAMPLER_TEXTURES:
         /* FIXME: Shouldn't this be *_UPDATE_SAMPLER_IMAGES? */
         update_sampler_textures = (VkUpdateSamplerTextures *) common;

         /* Combined image/sampler: fill both halves of each slot. */
         for (uint32_t j = 0; j < update_sampler_textures->count; j++) {
            set->descriptors[update_sampler_textures->binding + j].view =
               (struct anv_surface_view *)
               update_sampler_textures->pSamplerImageViews[j].pImageView->view;
            set->descriptors[update_sampler_textures->binding + j].sampler =
               (struct anv_sampler *)
               update_sampler_textures->pSamplerImageViews[j].sampler;
         }
         break;

      case VK_STRUCTURE_TYPE_UPDATE_IMAGES:
         update_images = (VkUpdateImages *) common;

         for (uint32_t j = 0; j < update_images->count; j++) {
            set->descriptors[update_images->binding + j].view =
               (struct anv_surface_view *) update_images->pImageViews[j].view;
         }
         break;

      case VK_STRUCTURE_TYPE_UPDATE_BUFFERS:
         update_buffers = (VkUpdateBuffers *) common;

         for (uint32_t j = 0; j < update_buffers->count; j++) {
            set->descriptors[update_buffers->binding + j].view =
               (struct anv_surface_view *) update_buffers->pBufferViews[j].view;
         }
         /* FIXME: descriptor arrays? */
         break;

      case VK_STRUCTURE_TYPE_UPDATE_AS_COPY:
         /* Not implemented: copy-style updates are accepted but ignored. */
         update_as_copy = (VkUpdateAsCopy *) common;
         (void) update_as_copy;
         break;

      default:
         break;
      }
   }
}
1993
1994// State object functions
1995
/* Clamp x to the inclusive range [min, max]. */
static inline int64_t
clamp_int64(int64_t x, int64_t min, int64_t max)
{
   return x < min ? min : (x < max ? x : max);
}
2006
Jason Ekstrand57153da2015-05-22 15:15:08 -07002007static void
2008anv_dynamic_vp_state_destroy(struct anv_device *device,
2009 struct anv_object *object,
2010 VkObjectType obj_type)
2011{
2012 struct anv_dynamic_vp_state *state = (void *)object;
2013
2014 assert(obj_type == VK_OBJECT_TYPE_DYNAMIC_VP_STATE);
2015
2016 anv_state_pool_free(&device->dynamic_state_pool, state->sf_clip_vp);
2017 anv_state_pool_free(&device->dynamic_state_pool, state->cc_vp);
2018 anv_state_pool_free(&device->dynamic_state_pool, state->scissor);
2019
2020 anv_device_free(device, state);
2021}
2022
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002023VkResult anv_CreateDynamicViewportState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002024 VkDevice _device,
2025 const VkDynamicVpStateCreateInfo* pCreateInfo,
2026 VkDynamicVpState* pState)
2027{
2028 struct anv_device *device = (struct anv_device *) _device;
2029 struct anv_dynamic_vp_state *state;
2030
2031 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO);
2032
2033 state = anv_device_alloc(device, sizeof(*state), 8,
2034 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2035 if (state == NULL)
2036 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2037
Jason Ekstrand57153da2015-05-22 15:15:08 -07002038 state->base.destructor = anv_dynamic_vp_state_destroy;
2039
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002040 unsigned count = pCreateInfo->viewportAndScissorCount;
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002041 state->sf_clip_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002042 count * 64, 64);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002043 state->cc_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002044 count * 8, 32);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002045 state->scissor = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002046 count * 32, 32);
2047
2048 for (uint32_t i = 0; i < pCreateInfo->viewportAndScissorCount; i++) {
2049 const VkViewport *vp = &pCreateInfo->pViewports[i];
2050 const VkRect *s = &pCreateInfo->pScissors[i];
2051
2052 struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport = {
2053 .ViewportMatrixElementm00 = vp->width / 2,
2054 .ViewportMatrixElementm11 = vp->height / 2,
2055 .ViewportMatrixElementm22 = (vp->maxDepth - vp->minDepth) / 2,
2056 .ViewportMatrixElementm30 = vp->originX + vp->width / 2,
2057 .ViewportMatrixElementm31 = vp->originY + vp->height / 2,
2058 .ViewportMatrixElementm32 = (vp->maxDepth + vp->minDepth) / 2,
2059 .XMinClipGuardband = -1.0f,
2060 .XMaxClipGuardband = 1.0f,
2061 .YMinClipGuardband = -1.0f,
2062 .YMaxClipGuardband = 1.0f,
2063 .XMinViewPort = vp->originX,
2064 .XMaxViewPort = vp->originX + vp->width - 1,
2065 .YMinViewPort = vp->originY,
2066 .YMaxViewPort = vp->originY + vp->height - 1,
2067 };
2068
2069 struct GEN8_CC_VIEWPORT cc_viewport = {
2070 .MinimumDepth = vp->minDepth,
2071 .MaximumDepth = vp->maxDepth
2072 };
2073
2074 /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
2075 * ymax < ymin for empty clips. In case clip x, y, width height are all
2076 * 0, the clamps below produce 0 for xmin, ymin, xmax, ymax, which isn't
2077 * what we want. Just special case empty clips and produce a canonical
2078 * empty clip. */
2079 static const struct GEN8_SCISSOR_RECT empty_scissor = {
2080 .ScissorRectangleYMin = 1,
2081 .ScissorRectangleXMin = 1,
2082 .ScissorRectangleYMax = 0,
2083 .ScissorRectangleXMax = 0
2084 };
2085
2086 const int max = 0xffff;
2087 struct GEN8_SCISSOR_RECT scissor = {
2088 /* Do this math using int64_t so overflow gets clamped correctly. */
2089 .ScissorRectangleYMin = clamp_int64(s->offset.y, 0, max),
2090 .ScissorRectangleXMin = clamp_int64(s->offset.x, 0, max),
2091 .ScissorRectangleYMax = clamp_int64((uint64_t) s->offset.y + s->extent.height - 1, 0, max),
2092 .ScissorRectangleXMax = clamp_int64((uint64_t) s->offset.x + s->extent.width - 1, 0, max)
2093 };
2094
2095 GEN8_SF_CLIP_VIEWPORT_pack(NULL, state->sf_clip_vp.map + i * 64, &sf_clip_viewport);
2096 GEN8_CC_VIEWPORT_pack(NULL, state->cc_vp.map + i * 32, &cc_viewport);
2097
2098 if (s->extent.width <= 0 || s->extent.height <= 0) {
2099 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &empty_scissor);
2100 } else {
2101 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &scissor);
2102 }
2103 }
2104
2105 *pState = (VkDynamicVpState) state;
2106
2107 return VK_SUCCESS;
2108}
2109
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002110VkResult anv_CreateDynamicRasterState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002111 VkDevice _device,
2112 const VkDynamicRsStateCreateInfo* pCreateInfo,
2113 VkDynamicRsState* pState)
2114{
2115 struct anv_device *device = (struct anv_device *) _device;
2116 struct anv_dynamic_rs_state *state;
2117
2118 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_RS_STATE_CREATE_INFO);
2119
2120 state = anv_device_alloc(device, sizeof(*state), 8,
2121 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2122 if (state == NULL)
2123 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2124
2125 /* Missing these:
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002126 * float pointFadeThreshold;
2127 * // optional (GL45) - Size of point fade threshold
2128 */
2129
2130 struct GEN8_3DSTATE_SF sf = {
2131 GEN8_3DSTATE_SF_header,
2132 .LineWidth = pCreateInfo->lineWidth,
2133 .PointWidth = pCreateInfo->pointSize,
2134 };
2135
2136 GEN8_3DSTATE_SF_pack(NULL, state->state_sf, &sf);
2137
Kristian Høgsberg99883772015-05-26 09:40:10 -07002138 bool enable_bias = pCreateInfo->depthBias != 0.0f ||
2139 pCreateInfo->slopeScaledDepthBias != 0.0f;
2140 struct GEN8_3DSTATE_RASTER raster = {
2141 .GlobalDepthOffsetEnableSolid = enable_bias,
2142 .GlobalDepthOffsetEnableWireframe = enable_bias,
2143 .GlobalDepthOffsetEnablePoint = enable_bias,
2144 .GlobalDepthOffsetConstant = pCreateInfo->depthBias,
2145 .GlobalDepthOffsetScale = pCreateInfo->slopeScaledDepthBias,
2146 .GlobalDepthOffsetClamp = pCreateInfo->depthBiasClamp
2147 };
2148
2149 GEN8_3DSTATE_RASTER_pack(NULL, state->state_raster, &raster);
2150
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002151 *pState = (VkDynamicRsState) state;
2152
2153 return VK_SUCCESS;
2154}
2155
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002156VkResult anv_CreateDynamicColorBlendState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002157 VkDevice _device,
2158 const VkDynamicCbStateCreateInfo* pCreateInfo,
2159 VkDynamicCbState* pState)
2160{
2161 struct anv_device *device = (struct anv_device *) _device;
2162 struct anv_dynamic_cb_state *state;
2163
2164 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_CB_STATE_CREATE_INFO);
2165
2166 state = anv_device_alloc(device, sizeof(*state), 8,
2167 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2168 if (state == NULL)
2169 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2170
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002171 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2172 .BlendConstantColorRed = pCreateInfo->blendConst[0],
2173 .BlendConstantColorGreen = pCreateInfo->blendConst[1],
2174 .BlendConstantColorBlue = pCreateInfo->blendConst[2],
2175 .BlendConstantColorAlpha = pCreateInfo->blendConst[3]
2176 };
2177
2178 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2179
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002180 *pState = (VkDynamicCbState) state;
2181
2182 return VK_SUCCESS;
2183}
2184
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002185VkResult anv_CreateDynamicDepthStencilState(
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002186 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002187 const VkDynamicDsStateCreateInfo* pCreateInfo,
2188 VkDynamicDsState* pState)
2189{
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002190 struct anv_device *device = (struct anv_device *) _device;
2191 struct anv_dynamic_ds_state *state;
2192
2193 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_DS_STATE_CREATE_INFO);
2194
2195 state = anv_device_alloc(device, sizeof(*state), 8,
2196 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2197 if (state == NULL)
2198 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2199
2200 struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
2201 GEN8_3DSTATE_WM_DEPTH_STENCIL_header,
2202
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002203 /* Is this what we need to do? */
2204 .StencilBufferWriteEnable = pCreateInfo->stencilWriteMask != 0,
2205
Jason Ekstrand251aea82015-06-03 16:59:13 -07002206 .StencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2207 .StencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002208
Jason Ekstrand251aea82015-06-03 16:59:13 -07002209 .BackfaceStencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2210 .BackfaceStencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002211 };
2212
2213 GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL, state->state_wm_depth_stencil,
2214 &wm_depth_stencil);
2215
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002216 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2217 .StencilReferenceValue = pCreateInfo->stencilFrontRef,
2218 .BackFaceStencilReferenceValue = pCreateInfo->stencilBackRef
2219 };
2220
2221 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2222
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002223 *pState = (VkDynamicDsState) state;
2224
2225 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002226}
2227
2228// Command buffer functions
2229
/* Destructor for command buffers.  Walks both batch-bo chains (command
 * batch and surface-state batch) from newest to oldest via prev_batch_bo,
 * frees their reloc lists and state streams, then the exec2 arrays and
 * the command buffer itself.
 */
static void
anv_cmd_buffer_destroy(struct anv_device *device,
                       struct anv_object *object,
                       VkObjectType obj_type)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) object;

   assert(obj_type == VK_OBJECT_TYPE_COMMAND_BUFFER);

   /* Destroy all of the batch buffers */
   struct anv_batch_bo *bbo = cmd_buffer->last_batch_bo;
   while (bbo) {
      struct anv_batch_bo *prev = bbo->prev_batch_bo;
      anv_batch_bo_destroy(bbo, device);
      bbo = prev;
   }
   anv_reloc_list_finish(&cmd_buffer->batch.relocs, device);

   /* Destroy all of the surface state buffers */
   bbo = cmd_buffer->surface_batch_bo;
   while (bbo) {
      struct anv_batch_bo *prev = bbo->prev_batch_bo;
      anv_batch_bo_destroy(bbo, device);
      bbo = prev;
   }
   anv_reloc_list_finish(&cmd_buffer->surface_relocs, device);

   anv_state_stream_finish(&cmd_buffer->surface_state_stream);
   anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);
   /* anv_device_free on NULL is expected to be a no-op for the exec2
    * arrays, which start out NULL -- TODO confirm.
    */
   anv_device_free(device, cmd_buffer->exec2_objects);
   anv_device_free(device, cmd_buffer->exec2_bos);
   anv_device_free(device, cmd_buffer);
}
2263
/* Batch extend callback: called when the current batch bo runs out of
 * space.  Allocates a fresh batch bo, emits an MI_BATCH_BUFFER_START in
 * the old bo that jumps to the new one, links the new bo onto the chain,
 * and repoints the batch at it.
 */
static VkResult
anv_cmd_buffer_chain_batch(struct anv_batch *batch, void *_data)
{
   struct anv_cmd_buffer *cmd_buffer = _data;

   struct anv_batch_bo *new_bbo, *old_bbo = cmd_buffer->last_batch_bo;

   VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
   if (result != VK_SUCCESS)
      return result;

   /* We set the end of the batch a little short so we would be sure we
    * have room for the chaining command.  Since we're about to emit the
    * chaining command, let's set it back where it should go.
    */
   batch->end += GEN8_MI_BATCH_BUFFER_START_length * 4;
   assert(batch->end == old_bbo->bo.map + old_bbo->bo.size);

   /* Jump from the old batch bo into the start of the new one.  The
    * relocation for BatchBufferStartAddress is resolved at submit time.
    */
   anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_START,
      GEN8_MI_BATCH_BUFFER_START_header,
      ._2ndLevelBatchBuffer = _1stlevelbatch,
      .AddressSpaceIndicator = ASI_PPGTT,
      .BatchBufferStartAddress = { &new_bbo->bo, 0 },
   );

   /* Pad out to a 2-dword aligned boundary with zeros */
   if ((uintptr_t)batch->next % 8 != 0) {
      *(uint32_t *)batch->next = 0;
      batch->next += 4;
   }

   anv_batch_bo_finish(cmd_buffer->last_batch_bo, batch);

   /* Link the new bo onto the chain (newest first) and make the batch
    * write into it, again reserving room for a future chaining command.
    */
   new_bbo->prev_batch_bo = old_bbo;
   cmd_buffer->last_batch_bo = new_bbo;

   anv_batch_bo_start(new_bbo, batch, GEN8_MI_BATCH_BUFFER_START_length * 4);

   return VK_SUCCESS;
}
2304
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002305VkResult anv_CreateCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002306 VkDevice _device,
2307 const VkCmdBufferCreateInfo* pCreateInfo,
2308 VkCmdBuffer* pCmdBuffer)
2309{
2310 struct anv_device *device = (struct anv_device *) _device;
2311 struct anv_cmd_buffer *cmd_buffer;
2312 VkResult result;
2313
2314 cmd_buffer = anv_device_alloc(device, sizeof(*cmd_buffer), 8,
2315 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2316 if (cmd_buffer == NULL)
2317 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2318
Jason Ekstrand57153da2015-05-22 15:15:08 -07002319 cmd_buffer->base.destructor = anv_cmd_buffer_destroy;
2320
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002321 cmd_buffer->device = device;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002322 cmd_buffer->rs_state = NULL;
2323 cmd_buffer->vp_state = NULL;
Kristian Høgsberg Kristensen5744d172015-06-02 22:51:42 -07002324 cmd_buffer->cb_state = NULL;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002325 memset(&cmd_buffer->descriptors, 0, sizeof(cmd_buffer->descriptors));
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002326
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002327 result = anv_batch_bo_create(device, &cmd_buffer->last_batch_bo);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002328 if (result != VK_SUCCESS)
2329 goto fail;
2330
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002331 result = anv_reloc_list_init(&cmd_buffer->batch.relocs, device);
2332 if (result != VK_SUCCESS)
2333 goto fail_batch_bo;
2334
2335 cmd_buffer->batch.device = device;
2336 cmd_buffer->batch.extend_cb = anv_cmd_buffer_chain_batch;
2337 cmd_buffer->batch.user_data = cmd_buffer;
2338
2339 anv_batch_bo_start(cmd_buffer->last_batch_bo, &cmd_buffer->batch,
2340 GEN8_MI_BATCH_BUFFER_START_length * 4);
2341
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002342 result = anv_batch_bo_create(device, &cmd_buffer->surface_batch_bo);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002343 if (result != VK_SUCCESS)
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002344 goto fail_batch_relocs;
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002345 cmd_buffer->surface_batch_bo->first_reloc = 0;
2346
2347 result = anv_reloc_list_init(&cmd_buffer->surface_relocs, device);
2348 if (result != VK_SUCCESS)
2349 goto fail_ss_batch_bo;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002350
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002351 /* Start surface_next at 1 so surface offset 0 is invalid. */
2352 cmd_buffer->surface_next = 1;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002353
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002354 cmd_buffer->exec2_objects = NULL;
2355 cmd_buffer->exec2_bos = NULL;
2356 cmd_buffer->exec2_array_length = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002357
2358 anv_state_stream_init(&cmd_buffer->surface_state_stream,
2359 &device->surface_state_block_pool);
Kristian Høgsberga1ec7892015-05-13 13:51:08 -07002360 anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002361 &device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002362
2363 cmd_buffer->dirty = 0;
2364 cmd_buffer->vb_dirty = 0;
Jason Ekstrand22513052015-05-30 10:07:29 -07002365 cmd_buffer->descriptors_dirty = 0;
Jason Ekstrandae8c93e2015-05-25 17:08:11 -07002366 cmd_buffer->pipeline = NULL;
Kristian Høgsberg Kristensen5a317ef2015-05-27 21:45:23 -07002367 cmd_buffer->vp_state = NULL;
2368 cmd_buffer->rs_state = NULL;
2369 cmd_buffer->ds_state = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002370
2371 *pCmdBuffer = (VkCmdBuffer) cmd_buffer;
2372
2373 return VK_SUCCESS;
2374
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002375 fail_ss_batch_bo:
2376 anv_batch_bo_destroy(cmd_buffer->surface_batch_bo, device);
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002377 fail_batch_relocs:
2378 anv_reloc_list_finish(&cmd_buffer->batch.relocs, device);
2379 fail_batch_bo:
2380 anv_batch_bo_destroy(cmd_buffer->last_batch_bo, device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002381 fail:
2382 anv_device_free(device, cmd_buffer);
2383
2384 return result;
2385}
2386
/* Emit STATE_BASE_ADDRESS for this command buffer.
 *
 * The surface state base address points at the command buffer's own
 * surface_batch_bo (surface state is allocated out of that BO by
 * anv_cmd_buffer_alloc_surface_state), while dynamic and instruction state
 * come from the device-wide block pools.  General and indirect-object state
 * are unused and left with a zero base.  The 0xfffff buffer sizes are the
 * largest encodable value, which presumably disables any useful bounds
 * checking — TODO confirm against the Gen8 PRM.
 */
static void
anv_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_device *device = cmd_buffer->device;

   anv_batch_emit(&cmd_buffer->batch, GEN8_STATE_BASE_ADDRESS,
                  /* General state: unused, base left at zero. */
                  .GeneralStateBaseAddress = { NULL, 0 },
                  .GeneralStateMemoryObjectControlState = GEN8_MOCS,
                  .GeneralStateBaseAddressModifyEnable = true,
                  .GeneralStateBufferSize = 0xfffff,
                  .GeneralStateBufferSizeModifyEnable = true,

                  /* Binding table entries are offsets from this BO. */
                  .SurfaceStateBaseAddress = { &cmd_buffer->surface_batch_bo->bo, 0 },
                  .SurfaceStateMemoryObjectControlState = GEN8_MOCS,
                  .SurfaceStateBaseAddressModifyEnable = true,

                  .DynamicStateBaseAddress = { &device->dynamic_state_block_pool.bo, 0 },
                  .DynamicStateMemoryObjectControlState = GEN8_MOCS,
                  .DynamicStateBaseAddressModifyEnable = true,
                  .DynamicStateBufferSize = 0xfffff,
                  .DynamicStateBufferSizeModifyEnable = true,

                  /* Indirect objects: unused, base left at zero. */
                  .IndirectObjectBaseAddress = { NULL, 0 },
                  .IndirectObjectMemoryObjectControlState = GEN8_MOCS,
                  .IndirectObjectBaseAddressModifyEnable = true,
                  .IndirectObjectBufferSize = 0xfffff,
                  .IndirectObjectBufferSizeModifyEnable = true,

                  /* Shader kernels are allocated from this pool. */
                  .InstructionBaseAddress = { &device->instruction_block_pool.bo, 0 },
                  .InstructionMemoryObjectControlState = GEN8_MOCS,
                  .InstructionBaseAddressModifyEnable = true,
                  .InstructionBufferSize = 0xfffff,
                  .InstructionBuffersizeModifyEnable = true);
}
2421
/* vkBeginCommandBuffer
 *
 * Emits the one-time setup at the head of every command buffer: pipeline
 * select, SIP, state base addresses, and a set of 3D state packets this
 * driver leaves constant for the lifetime of the command buffer (disabled
 * tessellation/streamout stages, push constant space partitioning, etc.).
 *
 * NOTE(review): pBeginInfo is ignored — no usage flags or inheritance info
 * are honored yet.
 */
VkResult anv_BeginCommandBuffer(
    VkCmdBuffer                                 cmdBuffer,
    const VkCmdBufferBeginInfo*                 pBeginInfo)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;

   anv_batch_emit(&cmd_buffer->batch, GEN8_PIPELINE_SELECT,
                  .PipelineSelection = _3D);
   anv_batch_emit(&cmd_buffer->batch, GEN8_STATE_SIP);

   anv_cmd_buffer_emit_state_base_address(cmd_buffer);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VF_STATISTICS,
                  .StatisticsEnable = true);
   /* Tessellation and streamout are not supported; disable the stages. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_HS, .Enable = false);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_TE, .TEEnable = false);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DS, .FunctionEnable = false);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_STREAMOUT, .SOFunctionEnable = false);

   /* Statically partition the push constant space: 4KB-unit slices at
    * offsets 0/4/8 for VS/GS/PS — TODO confirm units against the PRM. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_PUSH_CONSTANT_ALLOC_VS,
                  .ConstantBufferOffset = 0,
                  .ConstantBufferSize = 4);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_PUSH_CONSTANT_ALLOC_GS,
                  .ConstantBufferOffset = 4,
                  .ConstantBufferSize = 4);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_PUSH_CONSTANT_ALLOC_PS,
                  .ConstantBufferOffset = 8,
                  .ConstantBufferSize = 4);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_WM_CHROMAKEY,
                  .ChromaKeyKillEnable = false);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_SBE_SWIZ);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_AA_LINE_PARAMETERS);

   return VK_SUCCESS;
}
2458
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002459static VkResult
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002460anv_cmd_buffer_add_bo(struct anv_cmd_buffer *cmd_buffer,
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002461 struct anv_bo *bo,
2462 struct drm_i915_gem_relocation_entry *relocs,
2463 size_t num_relocs)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002464{
2465 struct drm_i915_gem_exec_object2 *obj;
2466
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002467 if (bo->index < cmd_buffer->bo_count &&
2468 cmd_buffer->exec2_bos[bo->index] == bo)
2469 return VK_SUCCESS;
2470
2471 if (cmd_buffer->bo_count >= cmd_buffer->exec2_array_length) {
2472 uint32_t new_len = cmd_buffer->exec2_objects ?
2473 cmd_buffer->exec2_array_length * 2 : 64;
2474
2475 struct drm_i915_gem_exec_object2 *new_objects =
2476 anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_objects),
2477 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
2478 if (new_objects == NULL)
2479 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2480
2481 struct anv_bo **new_bos =
2482 anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_bos),
2483 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
2484 if (new_objects == NULL) {
2485 anv_device_free(cmd_buffer->device, new_objects);
2486 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2487 }
2488
2489 if (cmd_buffer->exec2_objects) {
2490 memcpy(new_objects, cmd_buffer->exec2_objects,
2491 cmd_buffer->bo_count * sizeof(*new_objects));
2492 memcpy(new_bos, cmd_buffer->exec2_bos,
2493 cmd_buffer->bo_count * sizeof(*new_bos));
2494 }
2495
2496 cmd_buffer->exec2_objects = new_objects;
2497 cmd_buffer->exec2_bos = new_bos;
2498 cmd_buffer->exec2_array_length = new_len;
2499 }
2500
2501 assert(cmd_buffer->bo_count < cmd_buffer->exec2_array_length);
2502
2503 bo->index = cmd_buffer->bo_count++;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002504 obj = &cmd_buffer->exec2_objects[bo->index];
2505 cmd_buffer->exec2_bos[bo->index] = bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002506
2507 obj->handle = bo->gem_handle;
2508 obj->relocation_count = 0;
2509 obj->relocs_ptr = 0;
2510 obj->alignment = 0;
2511 obj->offset = bo->offset;
2512 obj->flags = 0;
2513 obj->rsvd1 = 0;
2514 obj->rsvd2 = 0;
2515
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002516 if (relocs) {
2517 obj->relocation_count = num_relocs;
2518 obj->relocs_ptr = (uintptr_t) relocs;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002519 }
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002520
2521 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002522}
2523
2524static void
2525anv_cmd_buffer_add_validate_bos(struct anv_cmd_buffer *cmd_buffer,
2526 struct anv_reloc_list *list)
2527{
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002528 for (size_t i = 0; i < list->num_relocs; i++)
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002529 anv_cmd_buffer_add_bo(cmd_buffer, list->reloc_bos[i], NULL, 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002530}
2531
2532static void
2533anv_cmd_buffer_process_relocs(struct anv_cmd_buffer *cmd_buffer,
2534 struct anv_reloc_list *list)
2535{
2536 struct anv_bo *bo;
2537
2538 /* If the kernel supports I915_EXEC_NO_RELOC, it will compare offset in
2539 * struct drm_i915_gem_exec_object2 against the bos current offset and if
2540 * all bos haven't moved it will skip relocation processing alltogether.
2541 * If I915_EXEC_NO_RELOC is not supported, the kernel ignores the incoming
2542 * value of offset so we can set it either way. For that to work we need
2543 * to make sure all relocs use the same presumed offset.
2544 */
2545
2546 for (size_t i = 0; i < list->num_relocs; i++) {
2547 bo = list->reloc_bos[i];
2548 if (bo->offset != list->relocs[i].presumed_offset)
2549 cmd_buffer->need_reloc = true;
2550
2551 list->relocs[i].target_handle = bo->index;
2552 }
2553}
2554
/* vkEndCommandBuffer
 *
 * Terminates the batch and builds the execbuf2 submission: the exec object
 * list, relocation wiring, and the drm_i915_gem_execbuffer2 struct itself.
 * The exec list is ordered so that every BO with relocations is added
 * together with its reloc entries, and the first batch BO lands last in
 * the list (the final assert checks this ordering).
 */
VkResult anv_EndCommandBuffer(
    VkCmdBuffer                                 cmdBuffer)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_device *device = cmd_buffer->device;
   struct anv_batch *batch = &cmd_buffer->batch;

   anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_END);

   /* Round batch up to an even number of dwords.  The length is always a
    * multiple of 4 bytes, so bit 2 being set means an odd dword count. */
   if ((batch->next - batch->start) & 4)
      anv_batch_emit(batch, GEN8_MI_NOOP);

   /* Seal the current batch BO and the current surface state BO: record
    * how many relocs and bytes belong to each. */
   anv_batch_bo_finish(cmd_buffer->last_batch_bo, &cmd_buffer->batch);
   cmd_buffer->surface_batch_bo->num_relocs =
      cmd_buffer->surface_relocs.num_relocs - cmd_buffer->surface_batch_bo->first_reloc;
   cmd_buffer->surface_batch_bo->length = cmd_buffer->surface_next;

   cmd_buffer->bo_count = 0;
   cmd_buffer->need_reloc = false;

   /* Lock for access to bo->index. */
   pthread_mutex_lock(&device->mutex);

   /* Add surface state bos first so we can add them with their relocs. */
   for (struct anv_batch_bo *bbo = cmd_buffer->surface_batch_bo;
        bbo != NULL; bbo = bbo->prev_batch_bo) {
      anv_cmd_buffer_add_bo(cmd_buffer, &bbo->bo,
                            &cmd_buffer->surface_relocs.relocs[bbo->first_reloc],
                            bbo->num_relocs);
   }

   /* Add all of the BOs referenced by surface state */
   anv_cmd_buffer_add_validate_bos(cmd_buffer, &cmd_buffer->surface_relocs);

   /* Add all but the first batch BO */
   struct anv_batch_bo *batch_bo = cmd_buffer->last_batch_bo;
   while (batch_bo->prev_batch_bo) {
      anv_cmd_buffer_add_bo(cmd_buffer, &batch_bo->bo,
                            &batch->relocs.relocs[batch_bo->first_reloc],
                            batch_bo->num_relocs);
      batch_bo = batch_bo->prev_batch_bo;
   }

   /* Add everything referenced by the batches */
   anv_cmd_buffer_add_validate_bos(cmd_buffer, &batch->relocs);

   /* Add the first batch bo last */
   assert(batch_bo->prev_batch_bo == NULL && batch_bo->first_reloc == 0);
   anv_cmd_buffer_add_bo(cmd_buffer, &batch_bo->bo,
                         &batch->relocs.relocs[batch_bo->first_reloc],
                         batch_bo->num_relocs);
   assert(batch_bo->bo.index == cmd_buffer->bo_count - 1);

   /* Fill in target_handle on every reloc and see if anything moved. */
   anv_cmd_buffer_process_relocs(cmd_buffer, &cmd_buffer->surface_relocs);
   anv_cmd_buffer_process_relocs(cmd_buffer, &batch->relocs);

   cmd_buffer->execbuf.buffers_ptr = (uintptr_t) cmd_buffer->exec2_objects;
   cmd_buffer->execbuf.buffer_count = cmd_buffer->bo_count;
   cmd_buffer->execbuf.batch_start_offset = 0;
   cmd_buffer->execbuf.batch_len = batch->next - batch->start;
   cmd_buffer->execbuf.cliprects_ptr = 0;
   cmd_buffer->execbuf.num_cliprects = 0;
   cmd_buffer->execbuf.DR1 = 0;
   cmd_buffer->execbuf.DR4 = 0;

   /* HANDLE_LUT: target_handle values above are exec-list indices, not gem
    * handles.  NO_RELOC is only safe if no BO moved since the presumed
    * offsets were recorded. */
   cmd_buffer->execbuf.flags = I915_EXEC_HANDLE_LUT;
   if (!cmd_buffer->need_reloc)
      cmd_buffer->execbuf.flags |= I915_EXEC_NO_RELOC;
   cmd_buffer->execbuf.flags |= I915_EXEC_RENDER;
   cmd_buffer->execbuf.rsvd1 = device->context_id;
   cmd_buffer->execbuf.rsvd2 = 0;

   pthread_mutex_unlock(&device->mutex);

   return VK_SUCCESS;
}
2632
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002633VkResult anv_ResetCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002634 VkCmdBuffer cmdBuffer)
2635{
2636 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2637
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002638 /* Delete all but the first batch bo */
2639 while (cmd_buffer->last_batch_bo->prev_batch_bo) {
2640 struct anv_batch_bo *prev = cmd_buffer->last_batch_bo->prev_batch_bo;
2641 anv_batch_bo_destroy(cmd_buffer->last_batch_bo, cmd_buffer->device);
2642 cmd_buffer->last_batch_bo = prev;
2643 }
2644 assert(cmd_buffer->last_batch_bo->prev_batch_bo == NULL);
2645
2646 cmd_buffer->batch.relocs.num_relocs = 0;
2647 anv_batch_bo_start(cmd_buffer->last_batch_bo, &cmd_buffer->batch,
2648 GEN8_MI_BATCH_BUFFER_START_length * 4);
2649
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002650 /* Delete all but the first batch bo */
2651 while (cmd_buffer->surface_batch_bo->prev_batch_bo) {
2652 struct anv_batch_bo *prev = cmd_buffer->surface_batch_bo->prev_batch_bo;
2653 anv_batch_bo_destroy(cmd_buffer->surface_batch_bo, cmd_buffer->device);
2654 cmd_buffer->surface_batch_bo = prev;
2655 }
2656 assert(cmd_buffer->surface_batch_bo->prev_batch_bo == NULL);
2657
2658 cmd_buffer->surface_next = 1;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002659 cmd_buffer->surface_relocs.num_relocs = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002660
2661 return VK_SUCCESS;
2662}
2663
2664// Command buffer building functions
2665
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002666void anv_CmdBindPipeline(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002667 VkCmdBuffer cmdBuffer,
2668 VkPipelineBindPoint pipelineBindPoint,
2669 VkPipeline _pipeline)
2670{
2671 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002672 struct anv_pipeline *pipeline = (struct anv_pipeline *) _pipeline;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002673
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002674 cmd_buffer->pipeline = pipeline;
2675 cmd_buffer->vb_dirty |= pipeline->vb_used;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002676 cmd_buffer->dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
2677}
2678
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002679void anv_CmdBindDynamicStateObject(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002680 VkCmdBuffer cmdBuffer,
2681 VkStateBindPoint stateBindPoint,
2682 VkDynamicStateObject dynamicState)
2683{
2684 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2685 struct anv_dynamic_vp_state *vp_state;
2686
2687 switch (stateBindPoint) {
2688 case VK_STATE_BIND_POINT_VIEWPORT:
2689 vp_state = (struct anv_dynamic_vp_state *) dynamicState;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002690 /* We emit state immediately, but set cmd_buffer->vp_state to indicate
2691 * that vp state has been set in this command buffer. */
2692 cmd_buffer->vp_state = vp_state;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002693 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_SCISSOR_STATE_POINTERS,
2694 .ScissorRectPointer = vp_state->scissor.offset);
2695 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_CC,
2696 .CCViewportPointer = vp_state->cc_vp.offset);
2697 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_SF_CLIP,
2698 .SFClipViewportPointer = vp_state->sf_clip_vp.offset);
2699 break;
2700 case VK_STATE_BIND_POINT_RASTER:
2701 cmd_buffer->rs_state = (struct anv_dynamic_rs_state *) dynamicState;
2702 cmd_buffer->dirty |= ANV_CMD_BUFFER_RS_DIRTY;
2703 break;
2704 case VK_STATE_BIND_POINT_COLOR_BLEND:
Kristian Høgsberga1d30f82015-05-26 17:12:18 -07002705 cmd_buffer->cb_state = (struct anv_dynamic_cb_state *) dynamicState;
2706 cmd_buffer->dirty |= ANV_CMD_BUFFER_CB_DIRTY;
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002707 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002708 case VK_STATE_BIND_POINT_DEPTH_STENCIL:
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002709 cmd_buffer->ds_state = (struct anv_dynamic_ds_state *) dynamicState;
2710 cmd_buffer->dirty |= ANV_CMD_BUFFER_DS_DIRTY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002711 break;
2712 default:
2713 break;
2714 };
2715}
2716
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002717static struct anv_state
2718anv_cmd_buffer_alloc_surface_state(struct anv_cmd_buffer *cmd_buffer,
2719 uint32_t size, uint32_t alignment)
2720{
2721 struct anv_state state;
2722
2723 state.offset = ALIGN_U32(cmd_buffer->surface_next, alignment);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002724 if (state.offset + size > cmd_buffer->surface_batch_bo->bo.size)
2725 return (struct anv_state) { 0 };
2726
2727 state.map = cmd_buffer->surface_batch_bo->bo.map + state.offset;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002728 state.alloc_size = size;
2729 cmd_buffer->surface_next = state.offset + size;
2730
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002731 assert(state.offset + size <= cmd_buffer->surface_batch_bo->bo.size);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002732
2733 return state;
2734}
2735
/* Roll over to a fresh surface state BO when the current one fills up.
 *
 * The old BO is sealed (its reloc range and length recorded), a new BO is
 * chained in front of it, and STATE_BASE_ADDRESS is re-emitted so
 * subsequently allocated surface state offsets resolve against the new BO.
 *
 * Returns VK_SUCCESS, or the error from anv_batch_bo_create (in which case
 * the old BO remains current).
 */
static VkResult
anv_cmd_buffer_new_surface_state_bo(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_batch_bo *new_bbo, *old_bbo = cmd_buffer->surface_batch_bo;

   /* Finish off the old buffer */
   old_bbo->num_relocs =
      cmd_buffer->surface_relocs.num_relocs - old_bbo->first_reloc;
   old_bbo->length = cmd_buffer->surface_next;

   VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
   if (result != VK_SUCCESS)
      return result;

   /* Relocs from this index onward belong to the new BO. */
   new_bbo->first_reloc = cmd_buffer->surface_relocs.num_relocs;
   /* Offset 0 stays invalid, so allocation restarts at 1. */
   cmd_buffer->surface_next = 1;

   new_bbo->prev_batch_bo = old_bbo;
   cmd_buffer->surface_batch_bo = new_bbo;

   /* Re-emit state base addresses so we get the new surface state base
    * address before we start emitting binding tables etc.
    */
   anv_cmd_buffer_emit_state_base_address(cmd_buffer);

   /* It seems like just changing the state base addresses isn't enough.
    * Invalidating the cache seems to be enough to cause things to
    * propagate. However, I'm not 100% sure what we're supposed to do.
    */
   anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
                  .TextureCacheInvalidationEnable = true);

   return VK_SUCCESS;
}
2770
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002771void anv_CmdBindDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002772 VkCmdBuffer cmdBuffer,
2773 VkPipelineBindPoint pipelineBindPoint,
2774 uint32_t firstSet,
2775 uint32_t setCount,
2776 const VkDescriptorSet* pDescriptorSets,
2777 uint32_t dynamicOffsetCount,
2778 const uint32_t* pDynamicOffsets)
2779{
2780 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002781 struct anv_pipeline_layout *layout = cmd_buffer->pipeline->layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002782 struct anv_descriptor_set *set;
2783 struct anv_descriptor_set_layout *set_layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002784
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002785 assert(firstSet + setCount < MAX_SETS);
2786
2787 uint32_t dynamic_slot = 0;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002788 for (uint32_t i = 0; i < setCount; i++) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002789 set = (struct anv_descriptor_set *) pDescriptorSets[i];
2790 set_layout = layout->set[firstSet + i].layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002791
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002792 cmd_buffer->descriptors[firstSet + i].set = set;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002793
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002794 assert(set_layout->num_dynamic_buffers <
2795 ARRAY_SIZE(cmd_buffer->descriptors[0].dynamic_offsets));
2796 memcpy(cmd_buffer->descriptors[firstSet + i].dynamic_offsets,
2797 pDynamicOffsets + dynamic_slot,
2798 set_layout->num_dynamic_buffers * sizeof(*pDynamicOffsets));
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002799
Jason Ekstrand22513052015-05-30 10:07:29 -07002800 cmd_buffer->descriptors_dirty |= set_layout->shader_stages;
2801
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002802 dynamic_slot += set_layout->num_dynamic_buffers;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002803 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002804}
2805
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002806void anv_CmdBindIndexBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002807 VkCmdBuffer cmdBuffer,
2808 VkBuffer _buffer,
2809 VkDeviceSize offset,
2810 VkIndexType indexType)
2811{
2812 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2813 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
2814
2815 static const uint32_t vk_to_gen_index_type[] = {
Kristian Høgsberg Kristensen5caa4082015-05-31 22:35:11 -07002816 [VK_INDEX_TYPE_UINT8] = INDEX_BYTE,
2817 [VK_INDEX_TYPE_UINT16] = INDEX_WORD,
2818 [VK_INDEX_TYPE_UINT32] = INDEX_DWORD,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002819 };
2820
2821 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_INDEX_BUFFER,
2822 .IndexFormat = vk_to_gen_index_type[indexType],
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002823 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002824 .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002825 .BufferSize = buffer->size - offset);
2826}
2827
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002828void anv_CmdBindVertexBuffers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002829 VkCmdBuffer cmdBuffer,
2830 uint32_t startBinding,
2831 uint32_t bindingCount,
2832 const VkBuffer* pBuffers,
2833 const VkDeviceSize* pOffsets)
2834{
2835 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002836 struct anv_vertex_binding *vb = cmd_buffer->vertex_bindings;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002837
2838 /* We have to defer setting up vertex buffer since we need the buffer
2839 * stride from the pipeline. */
2840
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002841 assert(startBinding + bindingCount < MAX_VBS);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002842 for (uint32_t i = 0; i < bindingCount; i++) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002843 vb[startBinding + i].buffer = (struct anv_buffer *) pBuffers[i];
2844 vb[startBinding + i].offset = pOffsets[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002845 cmd_buffer->vb_dirty |= 1 << (startBinding + i);
2846 }
2847}
2848
/* Allocate and emit the binding table for one shader stage.
 *
 * For the fragment stage the first MAX_RTS entries are reserved for the
 * framebuffer's render targets (the "bias"); descriptor-set surfaces
 * follow.  Every surface gets a fresh copy of its SURFACE_STATE with the
 * buffer address relocated via the surface reloc list.
 *
 * Returns VK_SUCCESS, or VK_ERROR_OUT_OF_DEVICE_MEMORY when the current
 * surface state BO is full — the caller rolls over to a new BO and
 * re-emits (see flush_descriptor_sets).
 */
static VkResult
cmd_buffer_emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
                              unsigned stage)
{
   struct anv_pipeline_layout *layout = cmd_buffer->pipeline->layout;
   uint32_t color_attachments, bias, size;
   struct anv_state bt_state;

   if (stage == VK_SHADER_STAGE_FRAGMENT) {
      bias = MAX_RTS;
      color_attachments = cmd_buffer->framebuffer->color_attachment_count;
   } else {
      bias = 0;
      color_attachments = 0;
   }

   /* This is a little awkward: layout can be NULL but we still have to
    * allocate and set a binding table for the PS stage for render
    * targets. */
   uint32_t surface_count = layout ? layout->stage[stage].surface_count : 0;

   if (color_attachments + surface_count == 0)
      return VK_SUCCESS;

   size = (bias + surface_count) * sizeof(uint32_t);
   bt_state = anv_cmd_buffer_alloc_surface_state(cmd_buffer, size, 32);
   uint32_t *bt_map = bt_state.map;

   if (bt_state.map == NULL)
      return VK_ERROR_OUT_OF_DEVICE_MEMORY;

   /* _3DCommandSubOpcode values selecting the per-stage variant of
    * 3DSTATE_BINDING_TABLE_POINTERS_*. */
   static const uint32_t binding_table_opcodes[] = {
      [VK_SHADER_STAGE_VERTEX] = 38,
      [VK_SHADER_STAGE_TESS_CONTROL] = 39,
      [VK_SHADER_STAGE_TESS_EVALUATION] = 40,
      [VK_SHADER_STAGE_GEOMETRY] = 41,
      [VK_SHADER_STAGE_FRAGMENT] = 42,
      [VK_SHADER_STAGE_COMPUTE] = 0,
   };

   anv_batch_emit(&cmd_buffer->batch,
                  GEN8_3DSTATE_BINDING_TABLE_POINTERS_VS,
                  ._3DCommandSubOpcode = binding_table_opcodes[stage],
                  .PointertoVSBindingTable = bt_state.offset);

   /* Render targets occupy binding table slots [0, color_attachments). */
   for (uint32_t ca = 0; ca < color_attachments; ca++) {
      const struct anv_surface_view *view =
         cmd_buffer->framebuffer->color_attachments[ca];

      struct anv_state state =
         anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);

      if (state.map == NULL)
         return VK_ERROR_OUT_OF_DEVICE_MEMORY;

      memcpy(state.map, view->surface_state.map, 64);

      /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
      *(uint64_t *)(state.map + 8 * 4) =
         anv_reloc_list_add(&cmd_buffer->surface_relocs,
                            cmd_buffer->device,
                            state.offset + 8 * 4,
                            view->bo, view->offset);

      bt_map[ca] = state.offset;
   }

   if (layout == NULL)
      return VK_SUCCESS;

   /* Descriptor-set surfaces occupy slots starting at the bias. */
   for (uint32_t set = 0; set < layout->num_sets; set++) {
      struct anv_descriptor_set_binding *d = &cmd_buffer->descriptors[set];
      struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;
      struct anv_descriptor_slot *surface_slots =
         set_layout->stage[stage].surface_start;

      uint32_t start = bias + layout->set[set].surface_start[stage];

      for (uint32_t b = 0; b < set_layout->stage[stage].surface_count; b++) {
         struct anv_surface_view *view =
            d->set->descriptors[surface_slots[b].index].view;

         /* Unbound descriptor slot: leave its entry untouched. */
         if (!view)
            continue;

         struct anv_state state =
            anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);

         if (state.map == NULL)
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;

         uint32_t offset;
         if (surface_slots[b].dynamic_slot >= 0) {
            /* Dynamic buffer: rebuild the surface state with the bound
             * dynamic offset applied to both address and range. */
            uint32_t dynamic_offset =
               d->dynamic_offsets[surface_slots[b].dynamic_slot];

            offset = view->offset + dynamic_offset;
            fill_buffer_surface_state(state.map, view->format, offset,
                                      view->range - dynamic_offset);
         } else {
            offset = view->offset;
            memcpy(state.map, view->surface_state.map, 64);
         }

         /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
         *(uint64_t *)(state.map + 8 * 4) =
            anv_reloc_list_add(&cmd_buffer->surface_relocs,
                               cmd_buffer->device,
                               state.offset + 8 * 4,
                               view->bo, offset);

         bt_map[start + b] = state.offset;
      }
   }

   return VK_SUCCESS;
}
2966
/* Allocate and emit the sampler state table for one shader stage.
 *
 * Sampler state lives in the dynamic state stream (not surface state), so
 * unlike cmd_buffer_emit_binding_table this does not consume surface BO
 * space.  Each bound sampler's pre-packed 16-byte state is copied into its
 * slot.
 *
 * Returns VK_SUCCESS (including when there is nothing to emit), or
 * VK_ERROR_OUT_OF_DEVICE_MEMORY if the dynamic state allocation fails.
 */
static VkResult
cmd_buffer_emit_samplers(struct anv_cmd_buffer *cmd_buffer, unsigned stage)
{
   struct anv_pipeline_layout *layout = cmd_buffer->pipeline->layout;
   struct anv_state state;

   if (!layout)
      return VK_SUCCESS;

   uint32_t sampler_count = layout->stage[stage].sampler_count;

   if (sampler_count == 0)
      return VK_SUCCESS;

   /* 16 bytes of SAMPLER_STATE per sampler. */
   uint32_t size = sampler_count * 16;
   state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream, size, 32);

   if (state.map == NULL)
      return VK_ERROR_OUT_OF_DEVICE_MEMORY;

   /* _3DCommandSubOpcode values selecting the per-stage variant of
    * 3DSTATE_SAMPLER_STATE_POINTERS_*. */
   static const uint32_t sampler_state_opcodes[] = {
      [VK_SHADER_STAGE_VERTEX] = 43,
      [VK_SHADER_STAGE_TESS_CONTROL] = 44, /* HS */
      [VK_SHADER_STAGE_TESS_EVALUATION] = 45, /* DS */
      [VK_SHADER_STAGE_GEOMETRY] = 46,
      [VK_SHADER_STAGE_FRAGMENT] = 47,
      [VK_SHADER_STAGE_COMPUTE] = 0,
   };

   anv_batch_emit(&cmd_buffer->batch,
                  GEN8_3DSTATE_SAMPLER_STATE_POINTERS_VS,
                  ._3DCommandSubOpcode = sampler_state_opcodes[stage],
                  .PointertoVSSamplerState = state.offset);

   for (uint32_t set = 0; set < layout->num_sets; set++) {
      struct anv_descriptor_set_binding *d = &cmd_buffer->descriptors[set];
      struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;
      struct anv_descriptor_slot *sampler_slots =
         set_layout->stage[stage].sampler_start;

      uint32_t start = layout->set[set].sampler_start[stage];

      for (uint32_t b = 0; b < set_layout->stage[stage].sampler_count; b++) {
         struct anv_sampler *sampler =
            d->set->descriptors[sampler_slots[b].index].sampler;

         /* Unbound sampler slot: leave its entry untouched. */
         if (!sampler)
            continue;

         memcpy(state.map + (start + b) * 16,
                sampler->state, sizeof(sampler->state));
      }
   }

   return VK_SUCCESS;
}
3023
3024static void
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003025flush_descriptor_sets(struct anv_cmd_buffer *cmd_buffer)
3026{
Jason Ekstrand22513052015-05-30 10:07:29 -07003027 uint32_t s, dirty = cmd_buffer->descriptors_dirty &
3028 cmd_buffer->pipeline->active_stages;
3029
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07003030 VkResult result;
Jason Ekstrand22513052015-05-30 10:07:29 -07003031 for_each_bit(s, dirty) {
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07003032 result = cmd_buffer_emit_binding_table(cmd_buffer, s);
3033 if (result != VK_SUCCESS)
3034 break;
3035
3036 result = cmd_buffer_emit_samplers(cmd_buffer, s);
3037 if (result != VK_SUCCESS)
3038 break;
3039 }
3040
3041 if (result != VK_SUCCESS) {
3042 assert(result == VK_ERROR_OUT_OF_DEVICE_MEMORY);
3043
3044 result = anv_cmd_buffer_new_surface_state_bo(cmd_buffer);
3045 assert(result == VK_SUCCESS);
3046
Jason Ekstrand22513052015-05-30 10:07:29 -07003047 /* Re-emit all active binding tables */
3048 for_each_bit(s, cmd_buffer->pipeline->active_stages) {
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07003049 result = cmd_buffer_emit_binding_table(cmd_buffer, s);
3050 result = cmd_buffer_emit_samplers(cmd_buffer, s);
3051 }
3052
3053 /* It had better succeed this time */
3054 assert(result == VK_SUCCESS);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003055 }
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07003056
Jason Ekstrand22513052015-05-30 10:07:29 -07003057 cmd_buffer->descriptors_dirty &= ~cmd_buffer->pipeline->active_stages;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003058}
3059
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003060static struct anv_state
3061anv_cmd_buffer_emit_dynamic(struct anv_cmd_buffer *cmd_buffer,
3062 uint32_t *a, uint32_t dwords, uint32_t alignment)
3063{
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003064 struct anv_state state;
3065
Jason Ekstrandce002332015-06-05 17:14:41 -07003066 state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
3067 dwords * 4, alignment);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003068 memcpy(state.map, a, dwords * 4);
3069
3070 return state;
3071}
3072
3073static struct anv_state
3074anv_cmd_buffer_merge_dynamic(struct anv_cmd_buffer *cmd_buffer,
Jason Ekstrandce002332015-06-05 17:14:41 -07003075 uint32_t *a, uint32_t *b,
3076 uint32_t dwords, uint32_t alignment)
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003077{
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003078 struct anv_state state;
3079 uint32_t *p;
3080
Jason Ekstrandce002332015-06-05 17:14:41 -07003081 state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
3082 dwords * 4, alignment);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003083 p = state.map;
3084 for (uint32_t i = 0; i < dwords; i++)
3085 p[i] = a[i] | b[i];
3086
3087 return state;
3088}
3089
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003090static void
3091anv_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
3092{
3093 struct anv_pipeline *pipeline = cmd_buffer->pipeline;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003094 uint32_t *p;
3095
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07003096 uint32_t vb_emit = cmd_buffer->vb_dirty & pipeline->vb_used;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07003097
3098 if (vb_emit) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07003099 const uint32_t num_buffers = __builtin_popcount(vb_emit);
3100 const uint32_t num_dwords = 1 + num_buffers * 4;
3101
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003102 p = anv_batch_emitn(&cmd_buffer->batch, num_dwords,
3103 GEN8_3DSTATE_VERTEX_BUFFERS);
3104 uint32_t vb, i = 0;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07003105 for_each_bit(vb, vb_emit) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07003106 struct anv_buffer *buffer = cmd_buffer->vertex_bindings[vb].buffer;
3107 uint32_t offset = cmd_buffer->vertex_bindings[vb].offset;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07003108
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003109 struct GEN8_VERTEX_BUFFER_STATE state = {
3110 .VertexBufferIndex = vb,
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07003111 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003112 .AddressModifyEnable = true,
3113 .BufferPitch = pipeline->binding_stride[vb],
Kristian Høgsberg099faa12015-05-11 22:19:58 -07003114 .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003115 .BufferSize = buffer->size - offset
3116 };
3117
3118 GEN8_VERTEX_BUFFER_STATE_pack(&cmd_buffer->batch, &p[1 + i * 4], &state);
3119 i++;
3120 }
3121 }
3122
3123 if (cmd_buffer->dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
3124 anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);
3125
Jason Ekstrand22513052015-05-30 10:07:29 -07003126 if (cmd_buffer->descriptors_dirty)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003127 flush_descriptor_sets(cmd_buffer);
3128
Kristian Høgsberg99883772015-05-26 09:40:10 -07003129 if (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_RS_DIRTY)) {
Kristian Høgsberg55b9b702015-05-11 22:23:38 -07003130 anv_batch_emit_merge(&cmd_buffer->batch,
3131 cmd_buffer->rs_state->state_sf, pipeline->state_sf);
Kristian Høgsberg99883772015-05-26 09:40:10 -07003132 anv_batch_emit_merge(&cmd_buffer->batch,
3133 cmd_buffer->rs_state->state_raster, pipeline->state_raster);
3134 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003135
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07003136 if (cmd_buffer->ds_state &&
3137 (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)))
3138 anv_batch_emit_merge(&cmd_buffer->batch,
3139 cmd_buffer->ds_state->state_wm_depth_stencil,
3140 pipeline->state_wm_depth_stencil);
3141
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003142 if (cmd_buffer->dirty & (ANV_CMD_BUFFER_CB_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)) {
3143 struct anv_state state;
Kristian Høgsberga1d30f82015-05-26 17:12:18 -07003144 if (cmd_buffer->ds_state == NULL)
3145 state = anv_cmd_buffer_emit_dynamic(cmd_buffer,
3146 cmd_buffer->cb_state->state_color_calc,
Jason Ekstrande69588b2015-06-05 17:26:01 -07003147 GEN8_COLOR_CALC_STATE_length, 64);
Kristian Høgsberga1d30f82015-05-26 17:12:18 -07003148 else if (cmd_buffer->cb_state == NULL)
3149 state = anv_cmd_buffer_emit_dynamic(cmd_buffer,
3150 cmd_buffer->ds_state->state_color_calc,
Jason Ekstrande69588b2015-06-05 17:26:01 -07003151 GEN8_COLOR_CALC_STATE_length, 64);
Kristian Høgsberga1d30f82015-05-26 17:12:18 -07003152 else
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003153 state = anv_cmd_buffer_merge_dynamic(cmd_buffer,
3154 cmd_buffer->ds_state->state_color_calc,
3155 cmd_buffer->cb_state->state_color_calc,
Jason Ekstrande69588b2015-06-05 17:26:01 -07003156 GEN8_COLOR_CALC_STATE_length, 64);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07003157
3158 anv_batch_emit(&cmd_buffer->batch,
3159 GEN8_3DSTATE_CC_STATE_POINTERS,
3160 .ColorCalcStatePointer = state.offset,
3161 .ColorCalcStatePointerValid = true);
3162 }
3163
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07003164 cmd_buffer->vb_dirty &= ~vb_emit;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003165 cmd_buffer->dirty = 0;
3166}
3167
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003168void anv_CmdDraw(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003169 VkCmdBuffer cmdBuffer,
3170 uint32_t firstVertex,
3171 uint32_t vertexCount,
3172 uint32_t firstInstance,
3173 uint32_t instanceCount)
3174{
3175 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3176
3177 anv_cmd_buffer_flush_state(cmd_buffer);
3178
3179 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3180 .VertexAccessType = SEQUENTIAL,
3181 .VertexCountPerInstance = vertexCount,
3182 .StartVertexLocation = firstVertex,
3183 .InstanceCount = instanceCount,
3184 .StartInstanceLocation = firstInstance,
3185 .BaseVertexLocation = 0);
3186}
3187
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003188void anv_CmdDrawIndexed(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003189 VkCmdBuffer cmdBuffer,
3190 uint32_t firstIndex,
3191 uint32_t indexCount,
3192 int32_t vertexOffset,
3193 uint32_t firstInstance,
3194 uint32_t instanceCount)
3195{
3196 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3197
3198 anv_cmd_buffer_flush_state(cmd_buffer);
3199
3200 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3201 .VertexAccessType = RANDOM,
3202 .VertexCountPerInstance = indexCount,
3203 .StartVertexLocation = firstIndex,
3204 .InstanceCount = instanceCount,
3205 .StartInstanceLocation = firstInstance,
Kristian Høgsberg Kristensenc8f07852015-06-02 22:35:47 -07003206 .BaseVertexLocation = vertexOffset);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003207}
3208
3209static void
3210anv_batch_lrm(struct anv_batch *batch,
3211 uint32_t reg, struct anv_bo *bo, uint32_t offset)
3212{
3213 anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_MEM,
3214 .RegisterAddress = reg,
3215 .MemoryAddress = { bo, offset });
3216}
3217
3218static void
3219anv_batch_lri(struct anv_batch *batch, uint32_t reg, uint32_t imm)
3220{
3221 anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_IMM,
3222 .RegisterOffset = reg,
3223 .DataDWord = imm);
3224}
3225
/* Auto-Draw / Indirect Registers
 *
 * Command-streamer register offsets that parameterize an indirect
 * 3DPRIMITIVE.  The indirect draw paths below load them from the
 * argument buffer with MI_LOAD_REGISTER_MEM (anv_batch_lrm).
 */
#define GEN7_3DPRIM_END_OFFSET 0x2420
#define GEN7_3DPRIM_START_VERTEX 0x2430
#define GEN7_3DPRIM_VERTEX_COUNT 0x2434
#define GEN7_3DPRIM_INSTANCE_COUNT 0x2438
#define GEN7_3DPRIM_START_INSTANCE 0x243C
#define GEN7_3DPRIM_BASE_VERTEX 0x2440
3233
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003234void anv_CmdDrawIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003235 VkCmdBuffer cmdBuffer,
3236 VkBuffer _buffer,
3237 VkDeviceSize offset,
3238 uint32_t count,
3239 uint32_t stride)
3240{
3241 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3242 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07003243 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003244 uint32_t bo_offset = buffer->offset + offset;
3245
3246 anv_cmd_buffer_flush_state(cmd_buffer);
3247
3248 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
3249 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
3250 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
3251 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 12);
3252 anv_batch_lri(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, 0);
3253
3254 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3255 .IndirectParameterEnable = true,
3256 .VertexAccessType = SEQUENTIAL);
3257}
3258
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003259void anv_CmdDrawIndexedIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003260 VkCmdBuffer cmdBuffer,
3261 VkBuffer _buffer,
3262 VkDeviceSize offset,
3263 uint32_t count,
3264 uint32_t stride)
3265{
3266 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3267 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07003268 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003269 uint32_t bo_offset = buffer->offset + offset;
3270
3271 anv_cmd_buffer_flush_state(cmd_buffer);
3272
3273 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
3274 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
3275 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
3276 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, bo, bo_offset + 12);
3277 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 16);
3278
3279 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3280 .IndirectParameterEnable = true,
3281 .VertexAccessType = RANDOM);
3282}
3283
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003284void anv_CmdDispatch(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003285 VkCmdBuffer cmdBuffer,
3286 uint32_t x,
3287 uint32_t y,
3288 uint32_t z)
3289{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003290 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003291}
3292
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003293void anv_CmdDispatchIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003294 VkCmdBuffer cmdBuffer,
3295 VkBuffer buffer,
3296 VkDeviceSize offset)
3297{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003298 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003299}
3300
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003301void anv_CmdSetEvent(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003302 VkCmdBuffer cmdBuffer,
3303 VkEvent event,
3304 VkPipeEvent pipeEvent)
3305{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003306 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003307}
3308
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003309void anv_CmdResetEvent(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003310 VkCmdBuffer cmdBuffer,
3311 VkEvent event,
3312 VkPipeEvent pipeEvent)
3313{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003314 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003315}
3316
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003317void anv_CmdWaitEvents(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003318 VkCmdBuffer cmdBuffer,
3319 VkWaitEvent waitEvent,
3320 uint32_t eventCount,
3321 const VkEvent* pEvents,
3322 uint32_t memBarrierCount,
3323 const void** ppMemBarriers)
3324{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003325 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003326}
3327
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003328void anv_CmdPipelineBarrier(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003329 VkCmdBuffer cmdBuffer,
3330 VkWaitEvent waitEvent,
3331 uint32_t pipeEventCount,
3332 const VkPipeEvent* pPipeEvents,
3333 uint32_t memBarrierCount,
3334 const void** ppMemBarriers)
3335{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003336 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003337}
3338
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003339void anv_CmdInitAtomicCounters(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003340 VkCmdBuffer cmdBuffer,
3341 VkPipelineBindPoint pipelineBindPoint,
3342 uint32_t startCounter,
3343 uint32_t counterCount,
3344 const uint32_t* pData)
3345{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003346 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003347}
3348
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003349void anv_CmdLoadAtomicCounters(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003350 VkCmdBuffer cmdBuffer,
3351 VkPipelineBindPoint pipelineBindPoint,
3352 uint32_t startCounter,
3353 uint32_t counterCount,
3354 VkBuffer srcBuffer,
3355 VkDeviceSize srcOffset)
3356{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003357 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003358}
3359
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003360void anv_CmdSaveAtomicCounters(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003361 VkCmdBuffer cmdBuffer,
3362 VkPipelineBindPoint pipelineBindPoint,
3363 uint32_t startCounter,
3364 uint32_t counterCount,
3365 VkBuffer destBuffer,
3366 VkDeviceSize destOffset)
3367{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003368 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003369}
3370
Jason Ekstrand57153da2015-05-22 15:15:08 -07003371static void
3372anv_framebuffer_destroy(struct anv_device *device,
3373 struct anv_object *object,
3374 VkObjectType obj_type)
3375{
3376 struct anv_framebuffer *fb = (struct anv_framebuffer *)object;
3377
3378 assert(obj_type == VK_OBJECT_TYPE_FRAMEBUFFER);
3379
3380 anv_DestroyObject((VkDevice) device,
3381 VK_OBJECT_TYPE_DYNAMIC_VP_STATE,
3382 fb->vp_state);
3383
3384 anv_device_free(device, fb);
3385}
3386
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003387VkResult anv_CreateFramebuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003388 VkDevice _device,
3389 const VkFramebufferCreateInfo* pCreateInfo,
3390 VkFramebuffer* pFramebuffer)
3391{
3392 struct anv_device *device = (struct anv_device *) _device;
3393 struct anv_framebuffer *framebuffer;
3394
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003395 static const struct anv_depth_stencil_view null_view =
3396 { .depth_format = D16_UNORM, .depth_stride = 0, .stencil_stride = 0 };
3397
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003398 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
3399
3400 framebuffer = anv_device_alloc(device, sizeof(*framebuffer), 8,
3401 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
3402 if (framebuffer == NULL)
3403 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
3404
Jason Ekstrand57153da2015-05-22 15:15:08 -07003405 framebuffer->base.destructor = anv_framebuffer_destroy;
3406
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003407 framebuffer->color_attachment_count = pCreateInfo->colorAttachmentCount;
3408 for (uint32_t i = 0; i < pCreateInfo->colorAttachmentCount; i++) {
3409 framebuffer->color_attachments[i] =
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07003410 (struct anv_surface_view *) pCreateInfo->pColorAttachments[i].view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003411 }
3412
3413 if (pCreateInfo->pDepthStencilAttachment) {
3414 framebuffer->depth_stencil =
3415 (struct anv_depth_stencil_view *) pCreateInfo->pDepthStencilAttachment->view;
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003416 } else {
3417 framebuffer->depth_stencil = &null_view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003418 }
3419
3420 framebuffer->sample_count = pCreateInfo->sampleCount;
3421 framebuffer->width = pCreateInfo->width;
3422 framebuffer->height = pCreateInfo->height;
3423 framebuffer->layers = pCreateInfo->layers;
3424
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003425 vkCreateDynamicViewportState((VkDevice) device,
Jason Ekstrand0599d392015-06-09 15:53:10 -07003426 &(VkDynamicVpStateCreateInfo) {
3427 .sType = VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO,
3428 .viewportAndScissorCount = 1,
3429 .pViewports = (VkViewport[]) {
3430 {
3431 .originX = 0,
3432 .originY = 0,
3433 .width = pCreateInfo->width,
3434 .height = pCreateInfo->height,
3435 .minDepth = 0,
3436 .maxDepth = 1
3437 },
3438 },
3439 .pScissors = (VkRect[]) {
3440 { { 0, 0 },
3441 { pCreateInfo->width, pCreateInfo->height } },
3442 }
3443 },
3444 &framebuffer->vp_state);
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003445
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003446 *pFramebuffer = (VkFramebuffer) framebuffer;
3447
3448 return VK_SUCCESS;
3449}
3450
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003451VkResult anv_CreateRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003452 VkDevice _device,
3453 const VkRenderPassCreateInfo* pCreateInfo,
3454 VkRenderPass* pRenderPass)
3455{
3456 struct anv_device *device = (struct anv_device *) _device;
3457 struct anv_render_pass *pass;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003458 size_t size;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003459
3460 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);
3461
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003462 size = sizeof(*pass) +
3463 pCreateInfo->layers * sizeof(struct anv_render_pass_layer);
3464 pass = anv_device_alloc(device, size, 8,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003465 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
3466 if (pass == NULL)
3467 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
3468
3469 pass->render_area = pCreateInfo->renderArea;
3470
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003471 pass->num_layers = pCreateInfo->layers;
3472
3473 pass->num_clear_layers = 0;
3474 for (uint32_t i = 0; i < pCreateInfo->layers; i++) {
3475 pass->layers[i].color_load_op = pCreateInfo->pColorLoadOps[i];
3476 pass->layers[i].clear_color = pCreateInfo->pColorLoadClearValues[i];
3477 if (pass->layers[i].color_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR)
3478 pass->num_clear_layers++;
3479 }
3480
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003481 *pRenderPass = (VkRenderPass) pass;
3482
3483 return VK_SUCCESS;
3484}
3485
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003486static void
3487anv_cmd_buffer_emit_depth_stencil(struct anv_cmd_buffer *cmd_buffer,
3488 struct anv_render_pass *pass)
3489{
3490 const struct anv_depth_stencil_view *view =
3491 cmd_buffer->framebuffer->depth_stencil;
3492
3493 /* FIXME: Implement the PMA stall W/A */
3494
3495 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DEPTH_BUFFER,
3496 .SurfaceType = SURFTYPE_2D,
3497 .DepthWriteEnable = view->depth_stride > 0,
3498 .StencilWriteEnable = view->stencil_stride > 0,
3499 .HierarchicalDepthBufferEnable = false,
3500 .SurfaceFormat = view->depth_format,
3501 .SurfacePitch = view->depth_stride > 0 ? view->depth_stride - 1 : 0,
3502 .SurfaceBaseAddress = { view->bo, view->depth_offset },
3503 .Height = pass->render_area.extent.height - 1,
3504 .Width = pass->render_area.extent.width - 1,
3505 .LOD = 0,
3506 .Depth = 1 - 1,
3507 .MinimumArrayElement = 0,
3508 .DepthBufferObjectControlState = GEN8_MOCS,
3509 .RenderTargetViewExtent = 1 - 1,
3510 .SurfaceQPitch = 0);
3511
3512 /* Disable hierarchial depth buffers. */
3513 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_HIER_DEPTH_BUFFER);
3514
3515 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_STENCIL_BUFFER,
3516 .StencilBufferEnable = view->stencil_stride > 0,
3517 .StencilBufferObjectControlState = GEN8_MOCS,
3518 .SurfacePitch = view->stencil_stride > 0 ? view->stencil_stride - 1 : 0,
3519 .SurfaceBaseAddress = { view->bo, view->stencil_offset },
3520 .SurfaceQPitch = 0);
3521
3522 /* Clear the clear params. */
3523 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_CLEAR_PARAMS);
3524}
3525
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003526void anv_CmdBeginRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003527 VkCmdBuffer cmdBuffer,
3528 const VkRenderPassBegin* pRenderPassBegin)
3529{
3530 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3531 struct anv_render_pass *pass = (struct anv_render_pass *) pRenderPassBegin->renderPass;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003532 struct anv_framebuffer *framebuffer =
3533 (struct anv_framebuffer *) pRenderPassBegin->framebuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003534
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003535 cmd_buffer->framebuffer = framebuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003536
Jason Ekstrand22513052015-05-30 10:07:29 -07003537 cmd_buffer->descriptors_dirty |= VK_SHADER_STAGE_FRAGMENT_BIT;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07003538
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003539 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DRAWING_RECTANGLE,
3540 .ClippedDrawingRectangleYMin = pass->render_area.offset.y,
3541 .ClippedDrawingRectangleXMin = pass->render_area.offset.x,
3542 .ClippedDrawingRectangleYMax =
3543 pass->render_area.offset.y + pass->render_area.extent.height - 1,
3544 .ClippedDrawingRectangleXMax =
3545 pass->render_area.offset.x + pass->render_area.extent.width - 1,
3546 .DrawingRectangleOriginY = 0,
3547 .DrawingRectangleOriginX = 0);
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003548
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003549 anv_cmd_buffer_emit_depth_stencil(cmd_buffer, pass);
3550
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003551 anv_cmd_buffer_clear(cmd_buffer, pass);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003552}
3553
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003554void anv_CmdEndRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003555 VkCmdBuffer cmdBuffer,
3556 VkRenderPass renderPass)
3557{
Jason Ekstranda1309c52015-05-13 22:13:05 -07003558 /* Emit a flushing pipe control at the end of a pass. This is kind of a
3559 * hack but it ensures that render targets always actually get written.
3560 * Eventually, we should do flushing based on image format transitions
3561 * or something of that nature.
3562 */
3563 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *)cmdBuffer;
3564 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
3565 .PostSyncOperation = NoWrite,
3566 .RenderTargetCacheFlushEnable = true,
3567 .InstructionCacheInvalidateEnable = true,
3568 .DepthCacheFlushEnable = true,
3569 .VFCacheInvalidationEnable = true,
3570 .TextureCacheInvalidationEnable = true,
3571 .CommandStreamerStallEnable = true);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003572}
Kristian Høgsbergf8866472015-05-15 22:04:15 -07003573
/* Debug-marker extension entry points.  Declared here with default
 * visibility so they are exported from the driver; the no-op definitions
 * follow below.
 */
void vkCmdDbgMarkerBegin(
    VkCmdBuffer cmdBuffer,
    const char* pMarker)
   __attribute__ ((visibility ("default")));

void vkCmdDbgMarkerEnd(
   VkCmdBuffer cmdBuffer)
   __attribute__ ((visibility ("default")));

VkResult vkDbgSetObjectTag(
    VkDevice device,
    VkObject object,
    size_t tagSize,
    const void* pTag)
   __attribute__ ((visibility ("default")));
3589
3590
/* Intentionally empty: debug markers are accepted but ignored. */
void vkCmdDbgMarkerBegin(
    VkCmdBuffer cmdBuffer,
    const char* pMarker)
{
}
3596
/* Intentionally empty: debug markers are accepted but ignored. */
void vkCmdDbgMarkerEnd(
   VkCmdBuffer cmdBuffer)
{
}
3601
/* Intentionally a no-op: object tags are accepted and discarded. */
VkResult vkDbgSetObjectTag(
    VkDevice device,
    VkObject object,
    size_t tagSize,
    const void* pTag)
{
   return VK_SUCCESS;
}