blob: 4040745c25ad3a6cf96a6254cfdd78d92547f7be [file] [log] [blame]
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001/*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include "private.h"
31
static int
anv_env_get_int(const char *name)
{
   /* Read an integer (decimal, octal, or hex) from the environment.
    * Returns 0 when the variable is unset or does not parse as a number. */
   const char *str = getenv(name);

   return str ? strtol(str, NULL, 0) : 0;
}
42
43static VkResult
44fill_physical_device(struct anv_physical_device *device,
45 struct anv_instance *instance,
46 const char *path)
47{
48 int fd;
49
50 fd = open("/dev/dri/renderD128", O_RDWR | O_CLOEXEC);
51 if (fd < 0)
52 return vk_error(VK_ERROR_UNAVAILABLE);
53
54 device->instance = instance;
55 device->path = path;
56
57 device->chipset_id = anv_env_get_int("INTEL_DEVID_OVERRIDE");
58 device->no_hw = false;
59 if (device->chipset_id) {
60 /* INTEL_DEVID_OVERRIDE implies INTEL_NO_HW. */
61 device->no_hw = true;
62 } else {
63 device->chipset_id = anv_gem_get_param(fd, I915_PARAM_CHIPSET_ID);
64 }
65 if (!device->chipset_id)
66 goto fail;
67
68 device->name = brw_get_device_name(device->chipset_id);
69 device->info = brw_get_device_info(device->chipset_id, -1);
70 if (!device->info)
71 goto fail;
72
73 if (!anv_gem_get_param(fd, I915_PARAM_HAS_WAIT_TIMEOUT))
74 goto fail;
75
76 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXECBUF2))
77 goto fail;
78
79 if (!anv_gem_get_param(fd, I915_PARAM_HAS_LLC))
80 goto fail;
81
82 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_CONSTANTS))
83 goto fail;
84
85 close(fd);
86
87 return VK_SUCCESS;
88
89 fail:
90 close(fd);
91
92 return vk_error(VK_ERROR_UNAVAILABLE);
93}
94
95static void *default_alloc(
96 void* pUserData,
97 size_t size,
98 size_t alignment,
99 VkSystemAllocType allocType)
100{
101 return malloc(size);
102}
103
/* Counterpart to default_alloc; releases memory obtained from it.
 * The user-data pointer is unused. */
static void default_free(
    void* pUserData,
    void* pMem)
{
   free(pMem);
}
110
/* Allocator used when the application passes no VkAllocCallbacks to
 * vkCreateInstance. */
static const VkAllocCallbacks default_alloc_callbacks = {
   .pUserData = NULL,
   .pfnAlloc = default_alloc,
   .pfnFree = default_free
};
116
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700117VkResult anv_CreateInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700118 const VkInstanceCreateInfo* pCreateInfo,
119 VkInstance* pInstance)
120{
121 struct anv_instance *instance;
122 const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
123 void *user_data = NULL;
124 VkResult result;
125
126 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
127
128 if (pCreateInfo->pAllocCb) {
129 alloc_callbacks = pCreateInfo->pAllocCb;
130 user_data = pCreateInfo->pAllocCb->pUserData;
131 }
132 instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
133 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
134 if (!instance)
135 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
136
137 instance->pAllocUserData = alloc_callbacks->pUserData;
138 instance->pfnAlloc = alloc_callbacks->pfnAlloc;
139 instance->pfnFree = alloc_callbacks->pfnFree;
140 instance->apiVersion = pCreateInfo->pAppInfo->apiVersion;
141
142 instance->physicalDeviceCount = 0;
143 result = fill_physical_device(&instance->physicalDevice,
144 instance, "/dev/dri/renderD128");
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700145
Chad Versacea61f3072015-05-20 19:51:10 -0700146 if (result != VK_SUCCESS)
147 return result;
148
149 instance->physicalDeviceCount++;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700150 *pInstance = (VkInstance) instance;
151
152 return VK_SUCCESS;
153}
154
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700155VkResult anv_DestroyInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700156 VkInstance _instance)
157{
158 struct anv_instance *instance = (struct anv_instance *) _instance;
159
160 instance->pfnFree(instance->pAllocUserData, instance);
161
162 return VK_SUCCESS;
163}
164
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700165VkResult anv_EnumeratePhysicalDevices(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700166 VkInstance _instance,
167 uint32_t* pPhysicalDeviceCount,
168 VkPhysicalDevice* pPhysicalDevices)
169{
170 struct anv_instance *instance = (struct anv_instance *) _instance;
171
172 if (*pPhysicalDeviceCount >= 1)
173 pPhysicalDevices[0] = (VkPhysicalDevice) &instance->physicalDevice;
174 *pPhysicalDeviceCount = instance->physicalDeviceCount;
175
176 return VK_SUCCESS;
177}
178
/* Query information about a physical device.  Two-call idiom: when pData
 * is NULL only *pDataSize is filled in; otherwise the requested struct is
 * written to pData.  Unknown info types yield VK_UNSUPPORTED. */
VkResult anv_GetPhysicalDeviceInfo(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceInfoType                    infoType,
    size_t*                                     pDataSize,
    void*                                       pData)
{
   struct anv_physical_device *device = (struct anv_physical_device *) physicalDevice;
   VkPhysicalDeviceProperties *properties;
   VkPhysicalDevicePerformance *performance;
   VkPhysicalDeviceQueueProperties *queue_properties;
   VkPhysicalDeviceMemoryProperties *memory_properties;
   VkDisplayPropertiesWSI *display_properties;
   /* NOTE(review): assumes an 80 ns command-streamer timestamp tick —
    * confirm this holds for all supported platforms. */
   uint64_t ns_per_tick = 80;

   /* Cast so the WSI info types (outside the core enum) can share this
    * switch without compiler warnings. */
   switch ((uint32_t) infoType) {
   case VK_PHYSICAL_DEVICE_INFO_TYPE_PROPERTIES:
      properties = pData;

      *pDataSize = sizeof(*properties);
      if (pData == NULL)
         return VK_SUCCESS;

      properties->apiVersion = 1;
      properties->driverVersion = 1;
      properties->vendorId = 0x8086;
      properties->deviceId = device->chipset_id;
      properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
      strcpy(properties->deviceName, device->name);
      properties->maxInlineMemoryUpdateSize = 0;
      properties->maxBoundDescriptorSets = MAX_SETS;
      properties->maxThreadGroupSize = 512;
      properties->timestampFrequency = 1000 * 1000 * 1000 / ns_per_tick;
      properties->multiColorAttachmentClears = true;
      properties->maxDescriptorSets = 8;
      properties->maxViewports = 16;
      properties->maxColorAttachments = 8;
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_PERFORMANCE:
      performance = pData;

      *pDataSize = sizeof(*performance);
      if (pData == NULL)
         return VK_SUCCESS;

      /* Placeholder performance figures. */
      performance->maxDeviceClock = 1.0;
      performance->aluPerClock = 1.0;
      performance->texPerClock = 1.0;
      performance->primsPerClock = 1.0;
      performance->pixelsPerClock = 1.0;
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PROPERTIES:
      queue_properties = pData;

      *pDataSize = sizeof(*queue_properties);
      if (pData == NULL)
         return VK_SUCCESS;

      /* A single queue (see anv_GetDeviceQueue). */
      queue_properties->queueFlags = 0;
      queue_properties->queueCount = 1;
      queue_properties->supportsTimestamps = true;
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_MEMORY_PROPERTIES:
      memory_properties = pData;

      *pDataSize = sizeof(*memory_properties);
      if (pData == NULL)
         return VK_SUCCESS;

      memory_properties->supportsMigration = false;
      memory_properties->supportsPinning = false;
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI:
      anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI");

      *pDataSize = sizeof(*display_properties);
      if (pData == NULL)
         return VK_SUCCESS;

      /* Stub: no real display enumeration yet. */
      display_properties = pData;
      display_properties->display = 0;
      display_properties->physicalResolution = (VkExtent2D) { 0, 0 };
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI:
      anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI");
      return VK_SUCCESS;


   default:
      return VK_UNSUPPORTED;
   }

}
276
Jason Ekstrande7acdda2015-07-07 18:51:53 -0700277PFN_vkVoidFunction anv_GetInstanceProcAddr(
278 VkInstance instance,
279 const char* pName)
280{
281 return anv_lookup_entrypoint(pName);
282}
283
284PFN_vkVoidFunction anv_GetDeviceProcAddr(
285 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700286 const char* pName)
287{
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700288 return anv_lookup_entrypoint(pName);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700289}
290
291static void
292parse_debug_flags(struct anv_device *device)
293{
294 const char *debug, *p, *end;
295
296 debug = getenv("INTEL_DEBUG");
297 device->dump_aub = false;
298 if (debug) {
299 for (p = debug; *p; p = end + 1) {
300 end = strchrnul(p, ',');
301 if (end - p == 3 && memcmp(p, "aub", 3) == 0)
302 device->dump_aub = true;
303 if (end - p == 5 && memcmp(p, "no_hw", 5) == 0)
304 device->no_hw = true;
305 if (*end == '\0')
306 break;
307 }
308 }
309}
310
/* Initialize the device's single queue.  Completion is tracked with a
 * monotonically increasing serial: a 4-byte GPU-visible slot allocated
 * from the surface-state pool holds the serial of the last completed
 * command buffer.  Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if that slot
 * cannot be allocated. */
static VkResult
anv_queue_init(struct anv_device *device, struct anv_queue *queue)
{
   queue->device = device;
   queue->pool = &device->surface_state_pool;

   queue->completed_serial = anv_state_pool_alloc(queue->pool, 4, 4);
   if (queue->completed_serial.map == NULL)
      return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);

   /* Nothing has completed yet; the first submission gets serial 1. */
   *(uint32_t *)queue->completed_serial.map = 0;
   queue->next_serial = 1;

   return VK_SUCCESS;
}
326
/* Release the queue's completed-serial state allocation. */
static void
anv_queue_finish(struct anv_queue *queue)
{
#ifdef HAVE_VALGRIND
   /* This gets torn down with the device so we only need to do this if
    * valgrind is present.
    */
   anv_state_pool_free(queue->pool, queue->completed_serial);
#endif
}
337
/* Upload the six standard Vulkan border colors into dynamic state memory
 * so sampler state can reference them by offset. */
static void
anv_device_init_border_colors(struct anv_device *device)
{
   static const VkClearColorValue border_colors[] = {
      [VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 0.0 } },
      [VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 1.0 } },
      [VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE] = { .f32 = { 1.0, 1.0, 1.0, 1.0 } },
      [VK_BORDER_COLOR_INT_TRANSPARENT_BLACK] = { .u32 = { 0, 0, 0, 0 } },
      [VK_BORDER_COLOR_INT_OPAQUE_BLACK] = { .u32 = { 0, 0, 0, 1 } },
      [VK_BORDER_COLOR_INT_OPAQUE_WHITE] = { .u32 = { 1, 1, 1, 1 } },
   };

   /* NOTE(review): the allocation result (.map) is not checked before the
    * memcpy — confirm the dynamic-state pool cannot fail at init time. */
   device->border_colors =
      anv_state_pool_alloc(&device->dynamic_state_pool,
                           sizeof(border_colors), 32);
   memcpy(device->border_colors.map, border_colors, sizeof(border_colors));
}
355
/* Size in bytes of each buffer handed out by the batch BO pool. */
static const uint32_t BATCH_SIZE = 8192;
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700357
/* Create a logical device: open the render node, create a GEM context,
 * and initialize all allocator pools, the compiler, the queue, the meta
 * (blit/clear) state, and the border-color table.  On failure the fd and
 * device allocation are released via the goto chain. */
VkResult anv_CreateDevice(
    VkPhysicalDevice                            _physicalDevice,
    const VkDeviceCreateInfo*                   pCreateInfo,
    VkDevice*                                   pDevice)
{
   struct anv_physical_device *physicalDevice =
      (struct anv_physical_device *) _physicalDevice;
   struct anv_instance *instance = physicalDevice->instance;
   struct anv_device *device;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);

   device = instance->pfnAlloc(instance->pAllocUserData,
                               sizeof(*device), 8,
                               VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!device)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* no_hw must be seeded before parse_debug_flags, which may also set it. */
   device->no_hw = physicalDevice->no_hw;
   parse_debug_flags(device);

   device->instance = physicalDevice->instance;
   /* NOTE(review): the node is hard-coded rather than taken from
    * physicalDevice->path — confirm intended. */
   device->fd = open("/dev/dri/renderD128", O_RDWR | O_CLOEXEC);
   if (device->fd == -1)
      goto fail_device;

   device->context_id = anv_gem_create_context(device);
   if (device->context_id == -1)
      goto fail_fd;

   anv_bo_pool_init(&device->batch_bo_pool, device, BATCH_SIZE);

   anv_block_pool_init(&device->dynamic_state_block_pool, device, 2048);

   anv_state_pool_init(&device->dynamic_state_pool,
                       &device->dynamic_state_block_pool);

   anv_block_pool_init(&device->instruction_block_pool, device, 2048);
   anv_block_pool_init(&device->surface_state_block_pool, device, 2048);

   anv_state_pool_init(&device->surface_state_pool,
                       &device->surface_state_block_pool);

   anv_block_pool_init(&device->scratch_block_pool, device, 0x10000);

   device->info = *physicalDevice->info;

   device->compiler = anv_compiler_create(device);
   device->aub_writer = NULL;

   pthread_mutex_init(&device->mutex, NULL);

   /* NOTE(review): anv_queue_init returns a VkResult that is ignored here;
    * a failed queue init leaves the device half-constructed. */
   anv_queue_init(device, &device->queue);

   anv_device_init_meta(device);

   anv_device_init_border_colors(device);

   *pDevice = (VkDevice) device;

   return VK_SUCCESS;

 fail_fd:
   close(device->fd);
 fail_device:
   anv_device_free(device, device);

   return vk_error(VK_ERROR_UNAVAILABLE);
}
427
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700428VkResult anv_DestroyDevice(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700429 VkDevice _device)
430{
431 struct anv_device *device = (struct anv_device *) _device;
432
433 anv_compiler_destroy(device->compiler);
434
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700435 anv_queue_finish(&device->queue);
436
Jason Ekstrand3a38b0d2015-06-09 11:08:51 -0700437 anv_device_finish_meta(device);
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700438
Jason Ekstrand38f5eef2015-06-09 11:41:31 -0700439#ifdef HAVE_VALGRIND
440 /* We only need to free these to prevent valgrind errors. The backing
441 * BO will go away in a couple of lines so we don't actually leak.
442 */
Jason Ekstrand522ab832015-07-08 11:44:52 -0700443 anv_state_pool_free(&device->dynamic_state_pool, device->border_colors);
Jason Ekstrand38f5eef2015-06-09 11:41:31 -0700444#endif
445
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700446 anv_bo_pool_finish(&device->batch_bo_pool);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700447 anv_block_pool_finish(&device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700448 anv_block_pool_finish(&device->instruction_block_pool);
449 anv_block_pool_finish(&device->surface_state_block_pool);
450
451 close(device->fd);
452
453 if (device->aub_writer)
454 anv_aub_writer_destroy(device->aub_writer);
455
456 anv_device_free(device, device);
457
458 return VK_SUCCESS;
459}
460
/* Instance-level (global) extensions advertised by the driver. */
static const VkExtensionProperties global_extensions[] = {
   {
      .extName = "VK_WSI_LunarG",
      .version = 3
   }
};
467
468VkResult anv_GetGlobalExtensionCount(
469 uint32_t* pCount)
470{
471 *pCount = ARRAY_SIZE(global_extensions);
472
473 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700474}
475
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700476
477VkResult anv_GetGlobalExtensionProperties(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700478 uint32_t extensionIndex,
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700479 VkExtensionProperties* pProperties)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700480{
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700481 assert(extensionIndex < ARRAY_SIZE(global_extensions));
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700482
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700483 *pProperties = global_extensions[extensionIndex];
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700484
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700485 return VK_SUCCESS;
486}
487
488VkResult anv_GetPhysicalDeviceExtensionCount(
489 VkPhysicalDevice physicalDevice,
490 uint32_t* pCount)
491{
492 /* None supported at this time */
493 *pCount = 0;
494
495 return VK_SUCCESS;
496}
497
498VkResult anv_GetPhysicalDeviceExtensionProperties(
499 VkPhysicalDevice physicalDevice,
500 uint32_t extensionIndex,
501 VkExtensionProperties* pProperties)
502{
503 /* None supported at this time */
504 return vk_error(VK_ERROR_INVALID_EXTENSION);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700505}
506
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700507VkResult anv_EnumerateLayers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700508 VkPhysicalDevice physicalDevice,
509 size_t maxStringSize,
510 size_t* pLayerCount,
511 char* const* pOutLayers,
512 void* pReserved)
513{
514 *pLayerCount = 0;
515
516 return VK_SUCCESS;
517}
518
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700519VkResult anv_GetDeviceQueue(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700520 VkDevice _device,
521 uint32_t queueNodeIndex,
522 uint32_t queueIndex,
523 VkQueue* pQueue)
524{
525 struct anv_device *device = (struct anv_device *) _device;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700526
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700527 assert(queueIndex == 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700528
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700529 *pQueue = (VkQueue) &device->queue;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700530
531 return VK_SUCCESS;
532}
533
Jason Ekstrand59def432015-05-27 11:41:28 -0700534VkResult
Jason Ekstrand403266b2015-05-25 17:38:15 -0700535anv_reloc_list_init(struct anv_reloc_list *list, struct anv_device *device)
536{
537 list->num_relocs = 0;
538 list->array_length = 256;
539 list->relocs =
540 anv_device_alloc(device, list->array_length * sizeof(*list->relocs), 8,
541 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
542
543 if (list->relocs == NULL)
544 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
545
546 list->reloc_bos =
547 anv_device_alloc(device, list->array_length * sizeof(*list->reloc_bos), 8,
548 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
549
550 if (list->relocs == NULL) {
551 anv_device_free(device, list->relocs);
552 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
553 }
554
555 return VK_SUCCESS;
556}
557
Jason Ekstrand59def432015-05-27 11:41:28 -0700558void
Jason Ekstrand403266b2015-05-25 17:38:15 -0700559anv_reloc_list_finish(struct anv_reloc_list *list, struct anv_device *device)
560{
561 anv_device_free(device, list->relocs);
562 anv_device_free(device, list->reloc_bos);
563}
564
565static VkResult
566anv_reloc_list_grow(struct anv_reloc_list *list, struct anv_device *device,
567 size_t num_additional_relocs)
568{
569 if (list->num_relocs + num_additional_relocs <= list->array_length)
570 return VK_SUCCESS;
571
572 size_t new_length = list->array_length * 2;
573 while (new_length < list->num_relocs + num_additional_relocs)
574 new_length *= 2;
575
576 struct drm_i915_gem_relocation_entry *new_relocs =
577 anv_device_alloc(device, new_length * sizeof(*list->relocs), 8,
578 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
579 if (new_relocs == NULL)
580 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
581
582 struct anv_bo **new_reloc_bos =
583 anv_device_alloc(device, new_length * sizeof(*list->reloc_bos), 8,
584 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
585 if (new_relocs == NULL) {
586 anv_device_free(device, new_relocs);
587 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
588 }
589
590 memcpy(new_relocs, list->relocs, list->num_relocs * sizeof(*list->relocs));
591 memcpy(new_reloc_bos, list->reloc_bos,
592 list->num_relocs * sizeof(*list->reloc_bos));
593
594 anv_device_free(device, list->relocs);
595 anv_device_free(device, list->reloc_bos);
596
597 list->relocs = new_relocs;
598 list->reloc_bos = new_reloc_bos;
599
600 return VK_SUCCESS;
601}
602
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700603static VkResult
604anv_batch_bo_create(struct anv_device *device, struct anv_batch_bo **bbo_out)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700605{
606 VkResult result;
607
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700608 struct anv_batch_bo *bbo =
609 anv_device_alloc(device, sizeof(*bbo), 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
610 if (bbo == NULL)
611 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700612
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700613 bbo->num_relocs = 0;
614 bbo->prev_batch_bo = NULL;
615
616 result = anv_bo_pool_alloc(&device->batch_bo_pool, &bbo->bo);
Jason Ekstrand403266b2015-05-25 17:38:15 -0700617 if (result != VK_SUCCESS) {
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700618 anv_device_free(device, bbo);
Jason Ekstrand403266b2015-05-25 17:38:15 -0700619 return result;
620 }
621
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700622 *bbo_out = bbo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700623
624 return VK_SUCCESS;
625}
626
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700627static void
628anv_batch_bo_start(struct anv_batch_bo *bbo, struct anv_batch *batch,
629 size_t batch_padding)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700630{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700631 batch->next = batch->start = bbo->bo.map;
632 batch->end = bbo->bo.map + bbo->bo.size - batch_padding;
633 bbo->first_reloc = batch->relocs.num_relocs;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700634}
635
/* Seal a batch BO: record how many bytes were emitted into it and how
 * many relocations belong to it (everything added since
 * anv_batch_bo_start). */
static void
anv_batch_bo_finish(struct anv_batch_bo *bbo, struct anv_batch *batch)
{
   assert(batch->start == bbo->bo.map);
   bbo->length = batch->next - batch->start;
   /* Let valgrind verify the emitted commands are fully initialized. */
   VG(VALGRIND_CHECK_MEM_IS_DEFINED(batch->start, bbo->length));
   bbo->num_relocs = batch->relocs.num_relocs - bbo->first_reloc;
}
644
645static void
646anv_batch_bo_destroy(struct anv_batch_bo *bbo, struct anv_device *device)
647{
648 anv_bo_pool_free(&device->batch_bo_pool, &bbo->bo);
649 anv_device_free(device, bbo);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700650}
651
652void *
653anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords)
654{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700655 if (batch->next + num_dwords * 4 > batch->end)
656 batch->extend_cb(batch, batch->user_data);
657
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700658 void *p = batch->next;
659
660 batch->next += num_dwords * 4;
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700661 assert(batch->next <= batch->end);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700662
663 return p;
664}
665
/* Append all of 'other''s relocations to 'list', rebasing each copied
 * entry's batch offset by 'offset' (the position at which 'other''s
 * commands were copied into the destination batch). */
static void
anv_reloc_list_append(struct anv_reloc_list *list, struct anv_device *device,
                      struct anv_reloc_list *other, uint32_t offset)
{
   anv_reloc_list_grow(list, device, other->num_relocs);
   /* TODO: Handle failure */

   memcpy(&list->relocs[list->num_relocs], &other->relocs[0],
          other->num_relocs * sizeof(other->relocs[0]));
   memcpy(&list->reloc_bos[list->num_relocs], &other->reloc_bos[0],
          other->num_relocs * sizeof(other->reloc_bos[0]));

   /* Only the freshly-copied tail entries are rebased; i + num_relocs
    * indexes past the pre-existing entries. */
   for (uint32_t i = 0; i < other->num_relocs; i++)
      list->relocs[i + list->num_relocs].offset += offset;

   list->num_relocs += other->num_relocs;
}
683
/* Append one relocation entry: the dword at 'offset' in the batch should
 * be patched to target_bo's address plus 'delta'.  Returns the presumed
 * address (target_bo->offset + delta); if the kernel later moves the BO
 * it rewrites the dword using this entry. */
static uint64_t
anv_reloc_list_add(struct anv_reloc_list *list, struct anv_device *device,
                   uint32_t offset, struct anv_bo *target_bo, uint32_t delta)
{
   struct drm_i915_gem_relocation_entry *entry;
   int index;

   anv_reloc_list_grow(list, device, 1);
   /* TODO: Handle failure */

   /* XXX: Can we use I915_EXEC_HANDLE_LUT? */
   index = list->num_relocs++;
   list->reloc_bos[index] = target_bo;
   entry = &list->relocs[index];
   entry->target_handle = target_bo->gem_handle;
   entry->delta = delta;
   entry->offset = offset;
   entry->presumed_offset = target_bo->offset;
   entry->read_domains = 0;
   entry->write_domain = 0;

   return target_bo->offset + delta;
}
707
/* Copy the contents of 'other' into 'batch', bringing its relocations
 * along, rebased to where the copy landed. */
void
anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other)
{
   uint32_t size, offset;

   size = other->next - other->start;
   assert(size % 4 == 0);

   /* Make room first; the extend callback may swap in a fresh BO. */
   if (batch->next + size > batch->end)
      batch->extend_cb(batch, batch->user_data);

   assert(batch->next + size <= batch->end);

   memcpy(batch->next, other->start, size);

   /* 'other''s relocation offsets are relative to its own start; rebase
    * them by the copy's position within 'batch'. */
   offset = batch->next - batch->start;
   anv_reloc_list_append(&batch->relocs, batch->device,
                         &other->relocs, offset);

   batch->next += size;
}
729
730uint64_t
731anv_batch_emit_reloc(struct anv_batch *batch,
732 void *location, struct anv_bo *bo, uint32_t delta)
733{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700734 return anv_reloc_list_add(&batch->relocs, batch->device,
735 location - batch->start, bo, delta);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700736}
737
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700738VkResult anv_QueueSubmit(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700739 VkQueue _queue,
740 uint32_t cmdBufferCount,
741 const VkCmdBuffer* pCmdBuffers,
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700742 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700743{
744 struct anv_queue *queue = (struct anv_queue *) _queue;
745 struct anv_device *device = queue->device;
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700746 struct anv_fence *fence = (struct anv_fence *) _fence;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700747 int ret;
748
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700749 for (uint32_t i = 0; i < cmdBufferCount; i++) {
750 struct anv_cmd_buffer *cmd_buffer =
751 (struct anv_cmd_buffer *) pCmdBuffers[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700752
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700753 if (device->dump_aub)
754 anv_cmd_buffer_dump(cmd_buffer);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700755
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700756 if (!device->no_hw) {
757 ret = anv_gem_execbuffer(device, &cmd_buffer->execbuf);
758 if (ret != 0)
Kristian Høgsberg2b7a0602015-05-12 14:38:58 -0700759 return vk_error(VK_ERROR_UNKNOWN);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700760
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700761 if (fence) {
762 ret = anv_gem_execbuffer(device, &fence->execbuf);
763 if (ret != 0)
764 return vk_error(VK_ERROR_UNKNOWN);
765 }
766
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700767 for (uint32_t i = 0; i < cmd_buffer->bo_count; i++)
768 cmd_buffer->exec2_bos[i]->offset = cmd_buffer->exec2_objects[i].offset;
769 } else {
770 *(uint32_t *)queue->completed_serial.map = cmd_buffer->serial;
771 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700772 }
773
774 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700775}
776
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700777VkResult anv_QueueWaitIdle(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700778 VkQueue _queue)
779{
780 struct anv_queue *queue = (struct anv_queue *) _queue;
781
782 return vkDeviceWaitIdle((VkDevice) queue->device);
783}
784
/* Wait for the GPU to drain by submitting a trivial batch (just
 * MI_BATCH_BUFFER_END padded with a NOOP) to the render ring and blocking
 * on it with an effectively infinite timeout.  In no-HW mode this is a
 * no-op apart from the state allocation. */
VkResult anv_DeviceWaitIdle(
    VkDevice                                    _device)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_state state;
   struct anv_batch batch;
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 exec2_objects[1];
   struct anv_bo *bo = NULL;
   VkResult result;
   int64_t timeout;
   int ret;

   /* Build the 32-byte dummy batch in dynamic-state memory.
    * NOTE(review): the allocation result is not checked — confirm the
    * pool cannot fail here. */
   state = anv_state_pool_alloc(&device->dynamic_state_pool, 32, 32);
   bo = &device->dynamic_state_pool.block_pool->bo;
   batch.start = batch.next = state.map;
   batch.end = state.map + 32;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   /* The batch lives in a pool BO whose offset is already known, so no
    * relocations are required. */
   exec2_objects[0].handle = bo->gem_handle;
   exec2_objects[0].relocation_count = 0;
   exec2_objects[0].relocs_ptr = 0;
   exec2_objects[0].alignment = 0;
   exec2_objects[0].offset = bo->offset;
   exec2_objects[0].flags = 0;
   exec2_objects[0].rsvd1 = 0;
   exec2_objects[0].rsvd2 = 0;

   execbuf.buffers_ptr = (uintptr_t) exec2_objects;
   execbuf.buffer_count = 1;
   execbuf.batch_start_offset = state.offset;
   execbuf.batch_len = batch.next - state.map;
   execbuf.cliprects_ptr = 0;
   execbuf.num_cliprects = 0;
   execbuf.DR1 = 0;
   execbuf.DR4 = 0;

   execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   /* rsvd1 carries the GEM context id for execbuffer2. */
   execbuf.rsvd1 = device->context_id;
   execbuf.rsvd2 = 0;

   if (!device->no_hw) {
      ret = anv_gem_execbuffer(device, &execbuf);
      if (ret != 0) {
         result = vk_error(VK_ERROR_UNKNOWN);
         goto fail;
      }

      /* Block until the dummy batch retires. */
      timeout = INT64_MAX;
      ret = anv_gem_wait(device, bo->gem_handle, &timeout);
      if (ret != 0) {
         result = vk_error(VK_ERROR_UNKNOWN);
         goto fail;
      }
   }

   anv_state_pool_free(&device->dynamic_state_pool, state);

   return VK_SUCCESS;

 fail:
   anv_state_pool_free(&device->dynamic_state_pool, state);

   return result;
}
852
853void *
854anv_device_alloc(struct anv_device * device,
855 size_t size,
856 size_t alignment,
857 VkSystemAllocType allocType)
858{
859 return device->instance->pfnAlloc(device->instance->pAllocUserData,
860 size,
861 alignment,
862 allocType);
863}
864
865void
866anv_device_free(struct anv_device * device,
867 void * mem)
868{
869 return device->instance->pfnFree(device->instance->pAllocUserData,
870 mem);
871}
872
873VkResult
874anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
875{
876 bo->gem_handle = anv_gem_create(device, size);
877 if (!bo->gem_handle)
878 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
879
880 bo->map = NULL;
881 bo->index = 0;
882 bo->offset = 0;
883 bo->size = size;
884
885 return VK_SUCCESS;
886}
887
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700888VkResult anv_AllocMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700889 VkDevice _device,
890 const VkMemoryAllocInfo* pAllocInfo,
891 VkDeviceMemory* pMem)
892{
893 struct anv_device *device = (struct anv_device *) _device;
894 struct anv_device_memory *mem;
895 VkResult result;
896
897 assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);
898
899 mem = anv_device_alloc(device, sizeof(*mem), 8,
900 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
901 if (mem == NULL)
902 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
903
904 result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
905 if (result != VK_SUCCESS)
906 goto fail;
907
908 *pMem = (VkDeviceMemory) mem;
909
910 return VK_SUCCESS;
911
912 fail:
913 anv_device_free(device, mem);
914
915 return result;
916}
917
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700918VkResult anv_FreeMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700919 VkDevice _device,
920 VkDeviceMemory _mem)
921{
922 struct anv_device *device = (struct anv_device *) _device;
923 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
924
925 if (mem->bo.map)
926 anv_gem_munmap(mem->bo.map, mem->bo.size);
927
928 if (mem->bo.gem_handle != 0)
929 anv_gem_close(device, mem->bo.gem_handle);
930
931 anv_device_free(device, mem);
932
933 return VK_SUCCESS;
934}
935
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700936VkResult anv_MapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700937 VkDevice _device,
938 VkDeviceMemory _mem,
939 VkDeviceSize offset,
940 VkDeviceSize size,
941 VkMemoryMapFlags flags,
942 void** ppData)
943{
944 struct anv_device *device = (struct anv_device *) _device;
945 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
946
947 /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
948 * takes a VkDeviceMemory pointer, it seems like only one map of the memory
949 * at a time is valid. We could just mmap up front and return an offset
950 * pointer here, but that may exhaust virtual memory on 32 bit
951 * userspace. */
952
953 mem->map = anv_gem_mmap(device, mem->bo.gem_handle, offset, size);
954 mem->map_size = size;
955
956 *ppData = mem->map;
957
958 return VK_SUCCESS;
959}
960
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700961VkResult anv_UnmapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700962 VkDevice _device,
963 VkDeviceMemory _mem)
964{
965 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
966
967 anv_gem_munmap(mem->map, mem->map_size);
968
969 return VK_SUCCESS;
970}
971
VkResult anv_FlushMappedMemoryRanges(
    VkDevice                                    device,
    uint32_t                                    memRangeCount,
    const VkMappedMemoryRange*                  pMemRanges)
{
   /* Intentionally a no-op: mappings are coherent on LLC platforms, so
    * there is nothing to flush.  The range arguments are ignored. */
   /* clflush here for !llc platforms */

   return VK_SUCCESS;
}
981
VkResult anv_InvalidateMappedMemoryRanges(
    VkDevice                                    device,
    uint32_t                                    memRangeCount,
    const VkMappedMemoryRange*                  pMemRanges)
{
   /* Invalidate and flush are symmetric cache-maintenance operations
    * here, so delegate to the (currently no-op) flush path. */
   return anv_FlushMappedMemoryRanges(device, memRangeCount, pMemRanges);
}
989
VkResult anv_DestroyObject(
    VkDevice                                    _device,
    VkObjectType                                objType,
    VkObject                                    _object)
{
   /* Generic object destruction entry point.  Objects fall into four
    * groups: special-cased handles (instance/device/memory), plain
    * allocations freed directly, objects with a destructor callback
    * stored in their anv_object base, and unimplemented types. */
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_object *object = (struct anv_object *) _object;

   switch (objType) {
   case VK_OBJECT_TYPE_INSTANCE:
      return anv_DestroyInstance((VkInstance) _object);

   case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
      /* We don't want to actually destroy physical devices */
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_DEVICE:
      /* A device can only destroy itself. */
      assert(_device == (VkDevice) _object);
      return anv_DestroyDevice((VkDevice) _object);

   case VK_OBJECT_TYPE_QUEUE:
      /* TODO */
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_DEVICE_MEMORY:
      return anv_FreeMemory(_device, (VkDeviceMemory) _object);

   case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
      /* These are just dummys anyway, so we don't need to destroy them */
      return VK_SUCCESS;

   /* Plain heap objects with no owned kernel resources: a single free
    * of the host allocation is sufficient. */
   case VK_OBJECT_TYPE_BUFFER:
   case VK_OBJECT_TYPE_IMAGE:
   case VK_OBJECT_TYPE_DEPTH_STENCIL_VIEW:
   case VK_OBJECT_TYPE_SHADER:
   case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
   case VK_OBJECT_TYPE_SAMPLER:
   case VK_OBJECT_TYPE_DESCRIPTOR_SET:
   case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
   case VK_OBJECT_TYPE_DYNAMIC_RS_STATE:
   case VK_OBJECT_TYPE_DYNAMIC_CB_STATE:
   case VK_OBJECT_TYPE_DYNAMIC_DS_STATE:
   case VK_OBJECT_TYPE_RENDER_PASS:
      /* These are trivially destroyable */
      anv_device_free(device, (void *) _object);
      return VK_SUCCESS;

   /* Objects whose creation hooked a destructor into their anv_object
    * base (e.g. anv_fence_destroy, anv_surface_view_destroy). */
   case VK_OBJECT_TYPE_COMMAND_BUFFER:
   case VK_OBJECT_TYPE_PIPELINE:
   case VK_OBJECT_TYPE_DYNAMIC_VP_STATE:
   case VK_OBJECT_TYPE_FENCE:
   case VK_OBJECT_TYPE_QUERY_POOL:
   case VK_OBJECT_TYPE_FRAMEBUFFER:
   case VK_OBJECT_TYPE_BUFFER_VIEW:
   case VK_OBJECT_TYPE_IMAGE_VIEW:
   case VK_OBJECT_TYPE_COLOR_ATTACHMENT_VIEW:
      (object->destructor)(device, object, objType);
      return VK_SUCCESS;

   case VK_OBJECT_TYPE_SEMAPHORE:
   case VK_OBJECT_TYPE_EVENT:
      /* Semaphores and events are not implemented yet. */
      stub_return(VK_UNSUPPORTED);

   default:
      unreachable("Invalid object type");
   }
}
1057
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001058VkResult anv_GetObjectMemoryRequirements(
1059 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001060 VkObjectType objType,
1061 VkObject object,
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001062 VkMemoryRequirements* pMemoryRequirements)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001063{
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001064 pMemoryRequirements->memPropsAllowed =
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001065 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
Jason Ekstrand68fa7502015-07-06 17:32:28 -07001066 /* VK_MEMORY_PROPERTY_HOST_NON_COHERENT_BIT | */
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001067 /* VK_MEMORY_PROPERTY_HOST_UNCACHED_BIT | */
Jason Ekstrand65f9ccb2015-07-06 17:33:43 -07001068 VK_MEMORY_PROPERTY_HOST_WRITE_COMBINED_BIT;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001069
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001070 pMemoryRequirements->memPropsRequired = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001071
1072 switch (objType) {
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001073 case VK_OBJECT_TYPE_BUFFER: {
1074 struct anv_buffer *buffer = (struct anv_buffer *) object;
1075 pMemoryRequirements->size = buffer->size;
1076 pMemoryRequirements->alignment = 16;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001077 break;
1078 }
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001079 case VK_OBJECT_TYPE_IMAGE: {
1080 struct anv_image *image = (struct anv_image *) object;
1081 pMemoryRequirements->size = image->size;
1082 pMemoryRequirements->alignment = image->alignment;
1083 break;
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001084 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001085 default:
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001086 pMemoryRequirements->size = 0;
1087 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001088 }
1089
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001090 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001091}
1092
Jason Ekstrandbb6567f2015-07-08 09:04:16 -07001093VkResult anv_BindObjectMemory(
1094 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001095 VkObjectType objType,
1096 VkObject object,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001097 VkDeviceMemory _mem,
1098 VkDeviceSize memOffset)
1099{
1100 struct anv_buffer *buffer;
1101 struct anv_image *image;
1102 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
1103
1104 switch (objType) {
1105 case VK_OBJECT_TYPE_BUFFER:
1106 buffer = (struct anv_buffer *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001107 buffer->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001108 buffer->offset = memOffset;
1109 break;
1110 case VK_OBJECT_TYPE_IMAGE:
1111 image = (struct anv_image *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001112 image->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001113 image->offset = memOffset;
1114 break;
1115 default:
1116 break;
1117 }
Jason Ekstrandbb6567f2015-07-08 09:04:16 -07001118
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001119 return VK_SUCCESS;
1120}
1121
VkResult anv_QueueBindSparseBufferMemory(
    VkQueue                                     queue,
    VkBuffer                                    buffer,
    VkDeviceSize                                rangeOffset,
    VkDeviceSize                                rangeSize,
    VkDeviceMemory                              mem,
    VkDeviceSize                                memOffset)
{
   /* Sparse buffer binding is not implemented. */
   stub_return(VK_UNSUPPORTED);
}
1132
VkResult anv_QueueBindSparseImageMemory(
    VkQueue                                     queue,
    VkImage                                     image,
    const VkImageMemoryBindInfo*                pBindInfo,
    VkDeviceMemory                              mem,
    VkDeviceSize                                memOffset)
{
   /* Sparse image binding is not implemented. */
   stub_return(VK_UNSUPPORTED);
}
1142
Jason Ekstrand57153da2015-05-22 15:15:08 -07001143static void
1144anv_fence_destroy(struct anv_device *device,
1145 struct anv_object *object,
1146 VkObjectType obj_type)
1147{
1148 struct anv_fence *fence = (struct anv_fence *) object;
1149
1150 assert(obj_type == VK_OBJECT_TYPE_FENCE);
1151
1152 anv_gem_munmap(fence->bo.map, fence->bo.size);
1153 anv_gem_close(device, fence->bo.gem_handle);
1154 anv_device_free(device, fence);
1155}
1156
VkResult anv_CreateFence(
    VkDevice                                    _device,
    const VkFenceCreateInfo*                    pCreateInfo,
    VkFence*                                    pFence)
{
   /* A fence is implemented as a tiny pre-built batch (just
    * MI_BATCH_BUFFER_END) plus a ready-to-submit execbuf.  Signaling the
    * fence later only requires submitting the stored execbuf; waiting is
    * a gem_wait on the fence BO. */
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_fence *fence;
   struct anv_batch batch;
   VkResult result;

   const uint32_t fence_size = 128;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);

   fence = anv_device_alloc(device, sizeof(*fence), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (fence == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   result = anv_bo_init_new(&fence->bo, device, fence_size);
   if (result != VK_SUCCESS)
      goto fail;

   /* Hook up the destructor used by anv_DestroyObject(). */
   fence->base.destructor = anv_fence_destroy;

   /* NOTE(review): the mmap result is not checked for failure here —
    * a failed mapping would crash in anv_batch_emit below; confirm
    * whether anv_gem_mmap can fail in practice. */
   fence->bo.map =
      anv_gem_mmap(device, fence->bo.gem_handle, 0, fence->bo.size);
   batch.next = batch.start = fence->bo.map;
   batch.end = fence->bo.map + fence->bo.size;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   /* One exec object — the fence BO itself — with no relocations. */
   fence->exec2_objects[0].handle = fence->bo.gem_handle;
   fence->exec2_objects[0].relocation_count = 0;
   fence->exec2_objects[0].relocs_ptr = 0;
   fence->exec2_objects[0].alignment = 0;
   fence->exec2_objects[0].offset = fence->bo.offset;
   fence->exec2_objects[0].flags = 0;
   fence->exec2_objects[0].rsvd1 = 0;
   fence->exec2_objects[0].rsvd2 = 0;

   fence->execbuf.buffers_ptr = (uintptr_t) fence->exec2_objects;
   fence->execbuf.buffer_count = 1;
   fence->execbuf.batch_start_offset = 0;
   fence->execbuf.batch_len = batch.next - fence->bo.map;
   fence->execbuf.cliprects_ptr = 0;
   fence->execbuf.num_cliprects = 0;
   fence->execbuf.DR1 = 0;
   fence->execbuf.DR4 = 0;

   fence->execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   fence->execbuf.rsvd1 = device->context_id;
   fence->execbuf.rsvd2 = 0;

   *pFence = (VkFence) fence;

   return VK_SUCCESS;

 fail:
   anv_device_free(device, fence);

   return result;
}
1221
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001222VkResult anv_ResetFences(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001223 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001224 uint32_t fenceCount,
Jason Ekstrandd5349b12015-07-07 17:18:00 -07001225 const VkFence* pFences)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001226{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001227 struct anv_fence **fences = (struct anv_fence **) pFences;
1228
Kristian Høgsberg Kristensen52637c02015-06-05 11:51:30 -07001229 for (uint32_t i = 0; i < fenceCount; i++)
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001230 fences[i]->ready = false;
1231
1232 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001233}
1234
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001235VkResult anv_GetFenceStatus(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001236 VkDevice _device,
1237 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001238{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001239 struct anv_device *device = (struct anv_device *) _device;
1240 struct anv_fence *fence = (struct anv_fence *) _fence;
1241 int64_t t = 0;
1242 int ret;
1243
1244 if (fence->ready)
1245 return VK_SUCCESS;
1246
1247 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
1248 if (ret == 0) {
1249 fence->ready = true;
1250 return VK_SUCCESS;
1251 }
1252
1253 return VK_NOT_READY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001254}
1255
VkResult anv_WaitForFences(
    VkDevice                                    _device,
    uint32_t                                    fenceCount,
    const VkFence*                              pFences,
    bool32_t                                    waitAll,
    uint64_t                                    timeout)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_fence **fences = (struct anv_fence **) pFences;
   int64_t t = timeout;
   int ret;

   /* FIXME: handle !waitAll */
   /* NOTE(review): `t` is shared across all iterations; if the kernel
    * wait updates it with the remaining time, the effective timeout
    * shrinks across fences rather than applying per call — confirm
    * against the i915 GEM_WAIT ioctl semantics and whether a total
    * timeout is the intended behavior here. */

   for (uint32_t i = 0; i < fenceCount; i++) {
      ret = anv_gem_wait(device, fences[i]->bo.gem_handle, &t);
      if (ret == -1 && errno == ETIME)
         return VK_TIMEOUT;
      else if (ret == -1)
         return vk_error(VK_ERROR_UNKNOWN);
   }

   return VK_SUCCESS;
}
1280
1281// Queue semaphore functions
1282
VkResult anv_CreateSemaphore(
    VkDevice                                    device,
    const VkSemaphoreCreateInfo*                pCreateInfo,
    VkSemaphore*                                pSemaphore)
{
   /* Queue semaphores are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1290
VkResult anv_QueueSignalSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   /* Queue semaphores are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1297
VkResult anv_QueueWaitSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   /* Queue semaphores are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1304
1305// Event functions
1306
VkResult anv_CreateEvent(
    VkDevice                                    device,
    const VkEventCreateInfo*                    pCreateInfo,
    VkEvent*                                    pEvent)
{
   /* Events are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1314
VkResult anv_GetEventStatus(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1321
VkResult anv_SetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1328
VkResult anv_ResetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   /* Events are not implemented yet. */
   stub_return(VK_UNSUPPORTED);
}
1335
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001336// Buffer functions
1337
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001338VkResult anv_CreateBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001339 VkDevice _device,
1340 const VkBufferCreateInfo* pCreateInfo,
1341 VkBuffer* pBuffer)
1342{
1343 struct anv_device *device = (struct anv_device *) _device;
1344 struct anv_buffer *buffer;
1345
1346 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
1347
1348 buffer = anv_device_alloc(device, sizeof(*buffer), 8,
1349 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1350 if (buffer == NULL)
1351 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1352
1353 buffer->size = pCreateInfo->size;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001354 buffer->bo = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001355 buffer->offset = 0;
1356
1357 *pBuffer = (VkBuffer) buffer;
1358
1359 return VK_SUCCESS;
1360}
1361
1362// Buffer view functions
1363
/* Pack a GEN8 RENDER_SURFACE_STATE describing a buffer (SURFTYPE_BUFFER)
 * into `state`.  `offset` is the byte offset of the buffer data within
 * its BO and `range` the size in bytes of the view. */
static void
fill_buffer_surface_state(void *state, VkFormat format,
                          uint32_t offset, uint32_t range)
{
   const struct anv_format *info;

   info = anv_format_for_vk_format(format);
   /* This assumes RGBA float format. */
   /* NOTE(review): stride is hard-coded to 4 bytes regardless of the
    * actual element size of `format` — confirm whether this should be
    * derived from `info` instead. */
   uint32_t stride = 4;
   uint32_t num_elements = range / stride;

   struct GEN8_RENDER_SURFACE_STATE surface_state = {
      .SurfaceType = SURFTYPE_BUFFER,
      .SurfaceArray = false,
      .SurfaceFormat = info->surface_format,
      .SurfaceVerticalAlignment = VALIGN4,
      .SurfaceHorizontalAlignment = HALIGN4,
      .TileMode = LINEAR,
      .VerticalLineStride = 0,
      .VerticalLineStrideOffset = 0,
      .SamplerL2BypassModeDisable = true,
      .RenderCacheReadWriteMode = WriteOnlyCache,
      .MemoryObjectControlState = GEN8_MOCS,
      .BaseMipLevel = 0.0,
      .SurfaceQPitch = 0,
      /* For buffer surfaces, the element count is encoded split across
       * the Width (bits 0-6), Height (bits 7-20) and Depth (bits 21-26)
       * fields. */
      .Height = (num_elements >> 7) & 0x3fff,
      .Width = num_elements & 0x7f,
      .Depth = (num_elements >> 21) & 0x3f,
      .SurfacePitch = stride - 1,
      .MinimumArrayElement = 0,
      .NumberofMultisamples = MULTISAMPLECOUNT_1,
      .XOffset = 0,
      .YOffset = 0,
      .SurfaceMinLOD = 0,
      .MIPCountLOD = 0,
      .AuxiliarySurfaceMode = AUX_NONE,
      .RedClearColor = 0,
      .GreenClearColor = 0,
      .BlueClearColor = 0,
      .AlphaClearColor = 0,
      /* Identity channel swizzle. */
      .ShaderChannelSelectRed = SCS_RED,
      .ShaderChannelSelectGreen = SCS_GREEN,
      .ShaderChannelSelectBlue = SCS_BLUE,
      .ShaderChannelSelectAlpha = SCS_ALPHA,
      .ResourceMinLOD = 0.0,
      /* FIXME: We assume that the image must be bound at this time. */
      .SurfaceBaseAddress = { NULL, offset },
   };

   GEN8_RENDER_SURFACE_STATE_pack(NULL, state, &surface_state);
}
1415
VkResult anv_CreateBufferView(
    VkDevice                                    _device,
    const VkBufferViewCreateInfo*               pCreateInfo,
    VkBufferView*                               pView)
{
   /* Create a buffer view: an anv_surface_view wrapping the buffer's BO
    * plus a packed RENDER_SURFACE_STATE in the surface state pool. */
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_buffer *buffer = (struct anv_buffer *) pCreateInfo->buffer;
   struct anv_surface_view *view;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);

   view = anv_device_alloc(device, sizeof(*view), 8,
                           VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (view == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* Shared destructor for surface views, used by anv_DestroyObject(). */
   view->base.destructor = anv_surface_view_destroy;

   /* The view's offset is relative to the BO: buffer binding offset
    * plus the view's own offset within the buffer. */
   view->bo = buffer->bo;
   view->offset = buffer->offset + pCreateInfo->offset;
   view->surface_state =
      anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
   view->format = pCreateInfo->format;
   view->range = pCreateInfo->range;

   fill_buffer_surface_state(view->surface_state.map,
                             pCreateInfo->format, view->offset, pCreateInfo->range);

   *pView = (VkBufferView) view;

   return VK_SUCCESS;
}
1448
1449// Sampler functions
1450
VkResult anv_CreateSampler(
    VkDevice                                    _device,
    const VkSamplerCreateInfo*                  pCreateInfo,
    VkSampler*                                  pSampler)
{
   /* Translate a VkSamplerCreateInfo into a packed GEN8 SAMPLER_STATE
    * stored inline in the anv_sampler object. */
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_sampler *sampler;
   uint32_t mag_filter, min_filter, max_anisotropy;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);

   sampler = anv_device_alloc(device, sizeof(*sampler), 8,
                              VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!sampler)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* Vulkan-enum -> GEN8-hardware-value translation tables. */
   static const uint32_t vk_to_gen_tex_filter[] = {
      [VK_TEX_FILTER_NEAREST] = MAPFILTER_NEAREST,
      [VK_TEX_FILTER_LINEAR] = MAPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_mipmap_mode[] = {
      [VK_TEX_MIPMAP_MODE_BASE] = MIPFILTER_NONE,
      [VK_TEX_MIPMAP_MODE_NEAREST] = MIPFILTER_NEAREST,
      [VK_TEX_MIPMAP_MODE_LINEAR] = MIPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_tex_address[] = {
      [VK_TEX_ADDRESS_WRAP] = TCM_WRAP,
      [VK_TEX_ADDRESS_MIRROR] = TCM_MIRROR,
      [VK_TEX_ADDRESS_CLAMP] = TCM_CLAMP,
      [VK_TEX_ADDRESS_MIRROR_ONCE] = TCM_MIRROR_ONCE,
      [VK_TEX_ADDRESS_CLAMP_BORDER] = TCM_CLAMP_BORDER,
   };

   static const uint32_t vk_to_gen_compare_op[] = {
      [VK_COMPARE_OP_NEVER] = PREFILTEROPNEVER,
      [VK_COMPARE_OP_LESS] = PREFILTEROPLESS,
      [VK_COMPARE_OP_EQUAL] = PREFILTEROPEQUAL,
      [VK_COMPARE_OP_LESS_EQUAL] = PREFILTEROPLEQUAL,
      [VK_COMPARE_OP_GREATER] = PREFILTEROPGREATER,
      [VK_COMPARE_OP_NOT_EQUAL] = PREFILTEROPNOTEQUAL,
      [VK_COMPARE_OP_GREATER_EQUAL] = PREFILTEROPGEQUAL,
      [VK_COMPARE_OP_ALWAYS] = PREFILTEROPALWAYS,
   };

   /* Anisotropic filtering overrides the plain min/mag filters. */
   if (pCreateInfo->maxAnisotropy > 1) {
      mag_filter = MAPFILTER_ANISOTROPIC;
      min_filter = MAPFILTER_ANISOTROPIC;
      max_anisotropy = (pCreateInfo->maxAnisotropy - 2) / 2;
   } else {
      mag_filter = vk_to_gen_tex_filter[pCreateInfo->magFilter];
      min_filter = vk_to_gen_tex_filter[pCreateInfo->minFilter];
      max_anisotropy = RATIO21;
   }

   struct GEN8_SAMPLER_STATE sampler_state = {
      .SamplerDisable = false,
      .TextureBorderColorMode = DX10OGL,
      .LODPreClampMode = 0,
      .BaseMipLevel = 0.0,
      .MipModeFilter = vk_to_gen_mipmap_mode[pCreateInfo->mipMode],
      .MagModeFilter = mag_filter,
      .MinModeFilter = min_filter,
      /* LOD bias is scaled by 256; presumably the hardware field uses
       * 8 fractional bits — confirm against the Gen8 PRM. */
      .TextureLODBias = pCreateInfo->mipLodBias * 256,
      .AnisotropicAlgorithm = EWAApproximation,
      .MinLOD = pCreateInfo->minLod,
      .MaxLOD = pCreateInfo->maxLod,
      .ChromaKeyEnable = 0,
      .ChromaKeyIndex = 0,
      .ChromaKeyMode = 0,
      .ShadowFunction = vk_to_gen_compare_op[pCreateInfo->compareOp],
      .CubeSurfaceControlMode = 0,

      /* Border color data lives in the device's preallocated
       * border_colors block, indexed by the VkBorderColor enum;
       * each entry is four floats. */
      .IndirectStatePointer =
         device->border_colors.offset +
         pCreateInfo->borderColor * sizeof(float) * 4,

      .LODClampMagnificationMode = MIPNONE,
      .MaximumAnisotropy = max_anisotropy,
      .RAddressMinFilterRoundingEnable = 0,
      .RAddressMagFilterRoundingEnable = 0,
      .VAddressMinFilterRoundingEnable = 0,
      .VAddressMagFilterRoundingEnable = 0,
      .UAddressMinFilterRoundingEnable = 0,
      .UAddressMagFilterRoundingEnable = 0,
      .TrilinearFilterQuality = 0,
      .NonnormalizedCoordinateEnable = 0,
      .TCXAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressU],
      .TCYAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressV],
      .TCZAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressW],
   };

   GEN8_SAMPLER_STATE_pack(NULL, sampler->state, &sampler_state);

   *pSampler = (VkSampler) sampler;

   return VK_SUCCESS;
}
1550
1551// Descriptor set functions
1552
/* Creates a descriptor set layout: a per-shader-stage mapping from flat API
 * descriptor indices to hardware surface (binding table) and sampler slots.
 *
 * The layout is one allocation: the anv_descriptor_set_layout header followed
 * by a trailing entries[] array holding, for each stage in turn, that stage's
 * surface slots and then its sampler slots.
 */
VkResult anv_CreateDescriptorSetLayout(
    VkDevice _device,
    const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
    VkDescriptorSetLayout* pSetLayout)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   /* First pass: count, per stage, the sampler and surface slots each
    * binding consumes, plus the number of dynamic buffers and the total
    * descriptor count over all bindings.
    */
   uint32_t sampler_count[VK_SHADER_STAGE_NUM] = { 0, };
   uint32_t surface_count[VK_SHADER_STAGE_NUM] = { 0, };
   uint32_t num_dynamic_buffers = 0;
   uint32_t count = 0;
   uint32_t stages = 0;
   uint32_t s;

   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      /* SAMPLER and COMBINED_IMAGE_SAMPLER need a sampler slot in every
       * stage that uses the binding.
       */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            sampler_count[s] += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      /* Everything except a pure SAMPLER needs a surface slot.  Note that
       * COMBINED_IMAGE_SAMPLER appears in both switches: it consumes one
       * sampler slot and one surface slot.
       */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            surface_count[s] += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      /* Dynamic buffers additionally consume dynamic-offset slots. */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         num_dynamic_buffers += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      stages |= pCreateInfo->pBinding[i].stageFlags;
      count += pCreateInfo->pBinding[i].arraySize;
   }

   uint32_t sampler_total = 0;
   uint32_t surface_total = 0;
   for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
      sampler_total += sampler_count[s];
      surface_total += surface_count[s];
   }

   /* Single allocation: header plus one slot entry per surface and per
    * sampler across all stages.
    */
   size_t size = sizeof(*set_layout) +
      (sampler_total + surface_total) * sizeof(set_layout->entries[0]);
   set_layout = anv_device_alloc(device, size, 8,
                                 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!set_layout)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->num_dynamic_buffers = num_dynamic_buffers;
   set_layout->count = count;
   set_layout->shader_stages = stages;

   /* Carve the trailing entries[] array into per-stage sub-arrays —
    * surfaces first, then samplers, for each stage.  The surface[] and
    * sampler[] cursors walk these sub-arrays during the second pass.
    */
   struct anv_descriptor_slot *p = set_layout->entries;
   struct anv_descriptor_slot *sampler[VK_SHADER_STAGE_NUM];
   struct anv_descriptor_slot *surface[VK_SHADER_STAGE_NUM];
   for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
      set_layout->stage[s].surface_count = surface_count[s];
      set_layout->stage[s].surface_start = surface[s] = p;
      p += surface_count[s];
      set_layout->stage[s].sampler_count = sampler_count[s];
      set_layout->stage[s].sampler_start = sampler[s] = p;
      p += sampler_count[s];
   }

   /* Second pass: give each array element of each binding its flat
    * descriptor index and, for dynamic buffers, its dynamic-offset slot
    * (-1 means "not dynamic").
    */
   uint32_t descriptor = 0;
   int8_t dynamic_slot = 0;
   bool is_dynamic;
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
               sampler[s]->index = descriptor + j;
               sampler[s]->dynamic_slot = -1;
               sampler[s]++;
            }
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         is_dynamic = true;
         break;
      default:
         is_dynamic = false;
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
               surface[s]->index = descriptor + j;
               if (is_dynamic)
                  surface[s]->dynamic_slot = dynamic_slot + j;
               else
                  surface[s]->dynamic_slot = -1;
               surface[s]++;
            }
         break;
      default:
         break;
      }

      if (is_dynamic)
         dynamic_slot += pCreateInfo->pBinding[i].arraySize;

      descriptor += pCreateInfo->pBinding[i].arraySize;
   }

   *pSetLayout = (VkDescriptorSetLayout) set_layout;

   return VK_SUCCESS;
}
1703
/* Stub: descriptor pools are not real objects in this driver yet —
 * descriptor sets are allocated straight from the device — so this hands
 * back a fixed non-zero dummy handle and allocates nothing.
 */
VkResult anv_CreateDescriptorPool(
    VkDevice device,
    VkDescriptorPoolUsage poolUsage,
    uint32_t maxSets,
    const VkDescriptorPoolCreateInfo* pCreateInfo,
    VkDescriptorPool* pDescriptorPool)
{
   *pDescriptorPool = 1;

   return VK_SUCCESS;
}
1715
/* Stub: the dummy descriptor pool owns no storage (see
 * anv_CreateDescriptorPool), so there is nothing to reset.
 */
VkResult anv_ResetDescriptorPool(
    VkDevice device,
    VkDescriptorPool descriptorPool)
{
   return VK_SUCCESS;
}
1722
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001723VkResult anv_AllocDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001724 VkDevice _device,
1725 VkDescriptorPool descriptorPool,
1726 VkDescriptorSetUsage setUsage,
1727 uint32_t count,
1728 const VkDescriptorSetLayout* pSetLayouts,
1729 VkDescriptorSet* pDescriptorSets,
1730 uint32_t* pCount)
1731{
1732 struct anv_device *device = (struct anv_device *) _device;
1733 const struct anv_descriptor_set_layout *layout;
1734 struct anv_descriptor_set *set;
1735 size_t size;
1736
1737 for (uint32_t i = 0; i < count; i++) {
1738 layout = (struct anv_descriptor_set_layout *) pSetLayouts[i];
Kristian Høgsberga77229c2015-05-13 11:49:30 -07001739 size = sizeof(*set) + layout->count * sizeof(set->descriptors[0]);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001740 set = anv_device_alloc(device, size, 8,
1741 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1742 if (!set) {
1743 *pCount = i;
1744 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1745 }
1746
Jason Ekstrand0a547512015-05-21 16:33:04 -07001747 /* Descriptor sets may not be 100% filled out so we need to memset to
1748 * ensure that we can properly detect and handle holes.
1749 */
1750 memset(set, 0, size);
1751
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001752 pDescriptorSets[i] = (VkDescriptorSet) set;
1753 }
1754
1755 *pCount = count;
1756
Kristian Høgsbergb4b3bd12015-05-17 18:39:12 -07001757 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001758}
1759
/* Updates the CPU-side contents of a descriptor set.  Each element of
 * ppUpdateArray is a Vk*Update structure discriminated by its sType; the
 * referenced samplers and views are stored into set->descriptors[], to be
 * consumed when the set is bound.
 */
void anv_UpdateDescriptors(
    VkDevice _device,
    VkDescriptorSet descriptorSet,
    uint32_t updateCount,
    const void** ppUpdateArray)
{
   struct anv_descriptor_set *set = (struct anv_descriptor_set *) descriptorSet;
   VkUpdateSamplers *update_samplers;
   VkUpdateSamplerTextures *update_sampler_textures;
   VkUpdateImages *update_images;
   VkUpdateBuffers *update_buffers;
   VkUpdateAsCopy *update_as_copy;

   for (uint32_t i = 0; i < updateCount; i++) {
      const struct anv_common *common = ppUpdateArray[i];

      switch (common->sType) {
      case VK_STRUCTURE_TYPE_UPDATE_SAMPLERS:
         /* Store one sampler per array element of the binding. */
         update_samplers = (VkUpdateSamplers *) common;

         for (uint32_t j = 0; j < update_samplers->count; j++) {
            set->descriptors[update_samplers->binding + j].sampler =
               (struct anv_sampler *) update_samplers->pSamplers[j];
         }
         break;

      case VK_STRUCTURE_TYPE_UPDATE_SAMPLER_TEXTURES:
         /* FIXME: Shouldn't this be *_UPDATE_SAMPLER_IMAGES? */
         update_sampler_textures = (VkUpdateSamplerTextures *) common;

         /* Combined image+sampler: record both the surface view and the
          * sampler for each element.
          */
         for (uint32_t j = 0; j < update_sampler_textures->count; j++) {
            set->descriptors[update_sampler_textures->binding + j].view =
               (struct anv_surface_view *)
               update_sampler_textures->pSamplerImageViews[j].pImageView->view;
            set->descriptors[update_sampler_textures->binding + j].sampler =
               (struct anv_sampler *)
               update_sampler_textures->pSamplerImageViews[j].sampler;
         }
         break;

      case VK_STRUCTURE_TYPE_UPDATE_IMAGES:
         update_images = (VkUpdateImages *) common;

         for (uint32_t j = 0; j < update_images->count; j++) {
            set->descriptors[update_images->binding + j].view =
               (struct anv_surface_view *) update_images->pImageViews[j].view;
         }
         break;

      case VK_STRUCTURE_TYPE_UPDATE_BUFFERS:
         update_buffers = (VkUpdateBuffers *) common;

         for (uint32_t j = 0; j < update_buffers->count; j++) {
            set->descriptors[update_buffers->binding + j].view =
               (struct anv_surface_view *) update_buffers->pBufferViews[j].view;
         }
         /* FIXME: descriptor arrays? */
         break;

      case VK_STRUCTURE_TYPE_UPDATE_AS_COPY:
         /* Descriptor-set copies are not implemented yet; ignored. */
         update_as_copy = (VkUpdateAsCopy *) common;
         (void) update_as_copy;
         break;

      default:
         /* Unknown update types are silently ignored. */
         break;
      }
   }
}
1829
1830// State object functions
1831
/* Clamps x to the inclusive range [min, max].
 *
 * Mirrors the original check order exactly: min wins when the range is
 * degenerate (min > max).
 */
static inline int64_t
clamp_int64(int64_t x, int64_t min, int64_t max)
{
   if (x < min)
      return min;
   if (x < max)
      return x;
   return max;
}
1842
Jason Ekstrand57153da2015-05-22 15:15:08 -07001843static void
1844anv_dynamic_vp_state_destroy(struct anv_device *device,
1845 struct anv_object *object,
1846 VkObjectType obj_type)
1847{
1848 struct anv_dynamic_vp_state *state = (void *)object;
1849
1850 assert(obj_type == VK_OBJECT_TYPE_DYNAMIC_VP_STATE);
1851
1852 anv_state_pool_free(&device->dynamic_state_pool, state->sf_clip_vp);
1853 anv_state_pool_free(&device->dynamic_state_pool, state->cc_vp);
1854 anv_state_pool_free(&device->dynamic_state_pool, state->scissor);
1855
1856 anv_device_free(device, state);
1857}
1858
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001859VkResult anv_CreateDynamicViewportState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001860 VkDevice _device,
1861 const VkDynamicVpStateCreateInfo* pCreateInfo,
1862 VkDynamicVpState* pState)
1863{
1864 struct anv_device *device = (struct anv_device *) _device;
1865 struct anv_dynamic_vp_state *state;
1866
1867 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO);
1868
1869 state = anv_device_alloc(device, sizeof(*state), 8,
1870 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1871 if (state == NULL)
1872 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1873
Jason Ekstrand57153da2015-05-22 15:15:08 -07001874 state->base.destructor = anv_dynamic_vp_state_destroy;
1875
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001876 unsigned count = pCreateInfo->viewportAndScissorCount;
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001877 state->sf_clip_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001878 count * 64, 64);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001879 state->cc_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001880 count * 8, 32);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001881 state->scissor = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001882 count * 32, 32);
1883
1884 for (uint32_t i = 0; i < pCreateInfo->viewportAndScissorCount; i++) {
1885 const VkViewport *vp = &pCreateInfo->pViewports[i];
Jason Ekstrand1f1b26b2015-07-06 17:47:18 -07001886 const VkRect2D *s = &pCreateInfo->pScissors[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001887
1888 struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport = {
1889 .ViewportMatrixElementm00 = vp->width / 2,
1890 .ViewportMatrixElementm11 = vp->height / 2,
1891 .ViewportMatrixElementm22 = (vp->maxDepth - vp->minDepth) / 2,
1892 .ViewportMatrixElementm30 = vp->originX + vp->width / 2,
1893 .ViewportMatrixElementm31 = vp->originY + vp->height / 2,
1894 .ViewportMatrixElementm32 = (vp->maxDepth + vp->minDepth) / 2,
1895 .XMinClipGuardband = -1.0f,
1896 .XMaxClipGuardband = 1.0f,
1897 .YMinClipGuardband = -1.0f,
1898 .YMaxClipGuardband = 1.0f,
1899 .XMinViewPort = vp->originX,
1900 .XMaxViewPort = vp->originX + vp->width - 1,
1901 .YMinViewPort = vp->originY,
1902 .YMaxViewPort = vp->originY + vp->height - 1,
1903 };
1904
1905 struct GEN8_CC_VIEWPORT cc_viewport = {
1906 .MinimumDepth = vp->minDepth,
1907 .MaximumDepth = vp->maxDepth
1908 };
1909
1910 /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
1911 * ymax < ymin for empty clips. In case clip x, y, width height are all
1912 * 0, the clamps below produce 0 for xmin, ymin, xmax, ymax, which isn't
1913 * what we want. Just special case empty clips and produce a canonical
1914 * empty clip. */
1915 static const struct GEN8_SCISSOR_RECT empty_scissor = {
1916 .ScissorRectangleYMin = 1,
1917 .ScissorRectangleXMin = 1,
1918 .ScissorRectangleYMax = 0,
1919 .ScissorRectangleXMax = 0
1920 };
1921
1922 const int max = 0xffff;
1923 struct GEN8_SCISSOR_RECT scissor = {
1924 /* Do this math using int64_t so overflow gets clamped correctly. */
1925 .ScissorRectangleYMin = clamp_int64(s->offset.y, 0, max),
1926 .ScissorRectangleXMin = clamp_int64(s->offset.x, 0, max),
1927 .ScissorRectangleYMax = clamp_int64((uint64_t) s->offset.y + s->extent.height - 1, 0, max),
1928 .ScissorRectangleXMax = clamp_int64((uint64_t) s->offset.x + s->extent.width - 1, 0, max)
1929 };
1930
1931 GEN8_SF_CLIP_VIEWPORT_pack(NULL, state->sf_clip_vp.map + i * 64, &sf_clip_viewport);
1932 GEN8_CC_VIEWPORT_pack(NULL, state->cc_vp.map + i * 32, &cc_viewport);
1933
1934 if (s->extent.width <= 0 || s->extent.height <= 0) {
1935 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &empty_scissor);
1936 } else {
1937 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &scissor);
1938 }
1939 }
1940
1941 *pState = (VkDynamicVpState) state;
1942
1943 return VK_SUCCESS;
1944}
1945
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001946VkResult anv_CreateDynamicRasterState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001947 VkDevice _device,
1948 const VkDynamicRsStateCreateInfo* pCreateInfo,
1949 VkDynamicRsState* pState)
1950{
1951 struct anv_device *device = (struct anv_device *) _device;
1952 struct anv_dynamic_rs_state *state;
1953
1954 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_RS_STATE_CREATE_INFO);
1955
1956 state = anv_device_alloc(device, sizeof(*state), 8,
1957 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1958 if (state == NULL)
1959 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1960
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001961 struct GEN8_3DSTATE_SF sf = {
1962 GEN8_3DSTATE_SF_header,
1963 .LineWidth = pCreateInfo->lineWidth,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001964 };
1965
1966 GEN8_3DSTATE_SF_pack(NULL, state->state_sf, &sf);
1967
Kristian Høgsberg99883772015-05-26 09:40:10 -07001968 bool enable_bias = pCreateInfo->depthBias != 0.0f ||
1969 pCreateInfo->slopeScaledDepthBias != 0.0f;
1970 struct GEN8_3DSTATE_RASTER raster = {
1971 .GlobalDepthOffsetEnableSolid = enable_bias,
1972 .GlobalDepthOffsetEnableWireframe = enable_bias,
1973 .GlobalDepthOffsetEnablePoint = enable_bias,
1974 .GlobalDepthOffsetConstant = pCreateInfo->depthBias,
1975 .GlobalDepthOffsetScale = pCreateInfo->slopeScaledDepthBias,
1976 .GlobalDepthOffsetClamp = pCreateInfo->depthBiasClamp
1977 };
1978
1979 GEN8_3DSTATE_RASTER_pack(NULL, state->state_raster, &raster);
1980
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001981 *pState = (VkDynamicRsState) state;
1982
1983 return VK_SUCCESS;
1984}
1985
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001986VkResult anv_CreateDynamicColorBlendState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001987 VkDevice _device,
1988 const VkDynamicCbStateCreateInfo* pCreateInfo,
1989 VkDynamicCbState* pState)
1990{
1991 struct anv_device *device = (struct anv_device *) _device;
1992 struct anv_dynamic_cb_state *state;
1993
1994 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_CB_STATE_CREATE_INFO);
1995
1996 state = anv_device_alloc(device, sizeof(*state), 8,
1997 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1998 if (state == NULL)
1999 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2000
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002001 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2002 .BlendConstantColorRed = pCreateInfo->blendConst[0],
2003 .BlendConstantColorGreen = pCreateInfo->blendConst[1],
2004 .BlendConstantColorBlue = pCreateInfo->blendConst[2],
2005 .BlendConstantColorAlpha = pCreateInfo->blendConst[3]
2006 };
2007
2008 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2009
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002010 *pState = (VkDynamicCbState) state;
2011
2012 return VK_SUCCESS;
2013}
2014
/* Creates a dynamic depth/stencil state object.  Pre-packs the dynamic
 * portions of 3DSTATE_WM_DEPTH_STENCIL (stencil read/write masks) and of
 * COLOR_CALC_STATE (front/back stencil reference values); only fields
 * driven by dynamic state are set here.
 */
VkResult anv_CreateDynamicDepthStencilState(
    VkDevice _device,
    const VkDynamicDsStateCreateInfo* pCreateInfo,
    VkDynamicDsState* pState)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_dynamic_ds_state *state;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_DS_STATE_CREATE_INFO);

   state = anv_device_alloc(device, sizeof(*state), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (state == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
      GEN8_3DSTATE_WM_DEPTH_STENCIL_header,

      /* NOTE(review): write-enable is derived from a non-zero write mask;
       * confirm this matches the intended API semantics.
       */
      .StencilBufferWriteEnable = pCreateInfo->stencilWriteMask != 0,

      /* The hardware masks are 8 bits wide; truncate the API values. */
      .StencilTestMask = pCreateInfo->stencilReadMask & 0xff,
      .StencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,

      .BackfaceStencilTestMask = pCreateInfo->stencilReadMask & 0xff,
      .BackfaceStencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
   };

   GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL, state->state_wm_depth_stencil,
                                      &wm_depth_stencil);

   /* Stencil reference values live in COLOR_CALC_STATE on GEN8. */
   struct GEN8_COLOR_CALC_STATE color_calc_state = {
      .StencilReferenceValue = pCreateInfo->stencilFrontRef,
      .BackFaceStencilReferenceValue = pCreateInfo->stencilBackRef
   };

   GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);

   *pState = (VkDynamicDsState) state;

   return VK_SUCCESS;
}
2057
2058// Command buffer functions
2059
/* Destructor for command buffers, invoked through the generic anv_object
 * destructor hook.  Walks both batch-BO chains (command batch and surface
 * state) from newest to oldest via prev_batch_bo, frees the relocation
 * lists, state streams and execbuf arrays, and finally the command buffer
 * itself.
 */
static void
anv_cmd_buffer_destroy(struct anv_device *device,
                       struct anv_object *object,
                       VkObjectType obj_type)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) object;

   assert(obj_type == VK_OBJECT_TYPE_COMMAND_BUFFER);

   /* Destroy all of the batch buffers, newest first along the
    * prev_batch_bo links.
    */
   struct anv_batch_bo *bbo = cmd_buffer->last_batch_bo;
   while (bbo) {
      struct anv_batch_bo *prev = bbo->prev_batch_bo;
      anv_batch_bo_destroy(bbo, device);
      bbo = prev;
   }
   anv_reloc_list_finish(&cmd_buffer->batch.relocs, device);

   /* Destroy all of the surface state buffers, same chain walk. */
   bbo = cmd_buffer->surface_batch_bo;
   while (bbo) {
      struct anv_batch_bo *prev = bbo->prev_batch_bo;
      anv_batch_bo_destroy(bbo, device);
      bbo = prev;
   }
   anv_reloc_list_finish(&cmd_buffer->surface_relocs, device);

   anv_state_stream_finish(&cmd_buffer->surface_state_stream);
   anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);
   /* exec2 arrays may be NULL if the buffer was never submitted;
    * anv_device_free is expected to tolerate that (free(NULL) semantics).
    */
   anv_device_free(device, cmd_buffer->exec2_objects);
   anv_device_free(device, cmd_buffer->exec2_bos);
   anv_device_free(device, cmd_buffer);
}
2093
/* Batch-extension callback (installed as anv_batch.extend_cb): invoked when
 * the current batch BO runs out of room.  Allocates a fresh batch BO, emits
 * an MI_BATCH_BUFFER_START in the old BO that chains execution into the new
 * one, links the BOs, and repoints the batch at the new BO.
 */
static VkResult
anv_cmd_buffer_chain_batch(struct anv_batch *batch, void *_data)
{
   struct anv_cmd_buffer *cmd_buffer = _data;

   struct anv_batch_bo *new_bbo, *old_bbo = cmd_buffer->last_batch_bo;

   VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
   if (result != VK_SUCCESS)
      return result;

   /* We set the end of the batch a little short so we would be sure we
    * have room for the chaining command.  Since we're about to emit the
    * chaining command, let's set it back where it should go.
    */
   batch->end += GEN8_MI_BATCH_BUFFER_START_length * 4;
   assert(batch->end == old_bbo->bo.map + old_bbo->bo.size);

   anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_START,
                  GEN8_MI_BATCH_BUFFER_START_header,
                  ._2ndLevelBatchBuffer = _1stlevelbatch,
                  .AddressSpaceIndicator = ASI_PPGTT,
                  .BatchBufferStartAddress = { &new_bbo->bo, 0 },
   );

   /* Pad out to a 2-dword aligned boundary with zeros */
   if ((uintptr_t)batch->next % 8 != 0) {
      *(uint32_t *)batch->next = 0;
      batch->next += 4;
   }

   anv_batch_bo_finish(cmd_buffer->last_batch_bo, batch);

   /* Link the new BO onto the chain and make it the current one. */
   new_bbo->prev_batch_bo = old_bbo;
   cmd_buffer->last_batch_bo = new_bbo;

   /* Again reserve room at the end for the next chaining command. */
   anv_batch_bo_start(new_bbo, batch, GEN8_MI_BATCH_BUFFER_START_length * 4);

   return VK_SUCCESS;
}
2134
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002135VkResult anv_CreateCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002136 VkDevice _device,
2137 const VkCmdBufferCreateInfo* pCreateInfo,
2138 VkCmdBuffer* pCmdBuffer)
2139{
2140 struct anv_device *device = (struct anv_device *) _device;
2141 struct anv_cmd_buffer *cmd_buffer;
2142 VkResult result;
2143
Jason Ekstrande19d6be2015-07-08 10:53:32 -07002144 assert(pCreateInfo->level == VK_CMD_BUFFER_LEVEL_PRIMARY);
2145
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002146 cmd_buffer = anv_device_alloc(device, sizeof(*cmd_buffer), 8,
2147 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2148 if (cmd_buffer == NULL)
2149 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2150
Jason Ekstrand57153da2015-05-22 15:15:08 -07002151 cmd_buffer->base.destructor = anv_cmd_buffer_destroy;
2152
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002153 cmd_buffer->device = device;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002154 cmd_buffer->rs_state = NULL;
2155 cmd_buffer->vp_state = NULL;
Kristian Høgsberg Kristensen5744d172015-06-02 22:51:42 -07002156 cmd_buffer->cb_state = NULL;
Jason Ekstrand5d4b6a02015-06-09 16:27:55 -07002157 cmd_buffer->ds_state = NULL;
Jason Ekstrand7fbed522015-07-07 15:11:56 -07002158 memset(&cmd_buffer->state_vf, 0, sizeof(cmd_buffer->state_vf));
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002159 memset(&cmd_buffer->descriptors, 0, sizeof(cmd_buffer->descriptors));
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002160
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002161 result = anv_batch_bo_create(device, &cmd_buffer->last_batch_bo);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002162 if (result != VK_SUCCESS)
2163 goto fail;
2164
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002165 result = anv_reloc_list_init(&cmd_buffer->batch.relocs, device);
2166 if (result != VK_SUCCESS)
2167 goto fail_batch_bo;
2168
2169 cmd_buffer->batch.device = device;
2170 cmd_buffer->batch.extend_cb = anv_cmd_buffer_chain_batch;
2171 cmd_buffer->batch.user_data = cmd_buffer;
2172
2173 anv_batch_bo_start(cmd_buffer->last_batch_bo, &cmd_buffer->batch,
2174 GEN8_MI_BATCH_BUFFER_START_length * 4);
2175
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002176 result = anv_batch_bo_create(device, &cmd_buffer->surface_batch_bo);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002177 if (result != VK_SUCCESS)
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002178 goto fail_batch_relocs;
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002179 cmd_buffer->surface_batch_bo->first_reloc = 0;
2180
2181 result = anv_reloc_list_init(&cmd_buffer->surface_relocs, device);
2182 if (result != VK_SUCCESS)
2183 goto fail_ss_batch_bo;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002184
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002185 /* Start surface_next at 1 so surface offset 0 is invalid. */
2186 cmd_buffer->surface_next = 1;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002187
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002188 cmd_buffer->exec2_objects = NULL;
2189 cmd_buffer->exec2_bos = NULL;
2190 cmd_buffer->exec2_array_length = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002191
2192 anv_state_stream_init(&cmd_buffer->surface_state_stream,
2193 &device->surface_state_block_pool);
Kristian Høgsberga1ec7892015-05-13 13:51:08 -07002194 anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002195 &device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002196
2197 cmd_buffer->dirty = 0;
2198 cmd_buffer->vb_dirty = 0;
Jason Ekstrand22513052015-05-30 10:07:29 -07002199 cmd_buffer->descriptors_dirty = 0;
Jason Ekstrandae8c93e2015-05-25 17:08:11 -07002200 cmd_buffer->pipeline = NULL;
Kristian Høgsberg Kristensen5a317ef2015-05-27 21:45:23 -07002201 cmd_buffer->vp_state = NULL;
2202 cmd_buffer->rs_state = NULL;
2203 cmd_buffer->ds_state = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002204
2205 *pCmdBuffer = (VkCmdBuffer) cmd_buffer;
2206
2207 return VK_SUCCESS;
2208
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002209 fail_ss_batch_bo:
2210 anv_batch_bo_destroy(cmd_buffer->surface_batch_bo, device);
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002211 fail_batch_relocs:
2212 anv_reloc_list_finish(&cmd_buffer->batch.relocs, device);
2213 fail_batch_bo:
2214 anv_batch_bo_destroy(cmd_buffer->last_batch_bo, device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002215 fail:
2216 anv_device_free(device, cmd_buffer);
2217
2218 return result;
2219}
2220
/* Emits STATE_BASE_ADDRESS for this command buffer: general state points at
 * the device scratch pool (only if any scratch has been allocated), surface
 * state at the command buffer's surface batch BO, dynamic state at the
 * device's dynamic-state pool, and instructions at the instruction pool.
 */
static void
anv_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_device *device = cmd_buffer->device;
   struct anv_bo *scratch_bo = NULL;

   /* Record the scratch pool size this batch was emitted against.
    * NOTE(review): presumably consulted later to detect pool growth —
    * confirm against the submit path.
    */
   cmd_buffer->scratch_size = device->scratch_block_pool.size;
   if (cmd_buffer->scratch_size > 0)
      scratch_bo = &device->scratch_block_pool.bo;

   anv_batch_emit(&cmd_buffer->batch, GEN8_STATE_BASE_ADDRESS,
                  .GeneralStateBaseAddress = { scratch_bo, 0 },
                  .GeneralStateMemoryObjectControlState = GEN8_MOCS,
                  .GeneralStateBaseAddressModifyEnable = true,
                  .GeneralStateBufferSize = 0xfffff,
                  .GeneralStateBufferSizeModifyEnable = true,

                  .SurfaceStateBaseAddress = { &cmd_buffer->surface_batch_bo->bo, 0 },
                  .SurfaceStateMemoryObjectControlState = GEN8_MOCS,
                  .SurfaceStateBaseAddressModifyEnable = true,

                  .DynamicStateBaseAddress = { &device->dynamic_state_block_pool.bo, 0 },
                  .DynamicStateMemoryObjectControlState = GEN8_MOCS,
                  .DynamicStateBaseAddressModifyEnable = true,
                  .DynamicStateBufferSize = 0xfffff,
                  .DynamicStateBufferSizeModifyEnable = true,

                  .IndirectObjectBaseAddress = { NULL, 0 },
                  .IndirectObjectMemoryObjectControlState = GEN8_MOCS,
                  .IndirectObjectBaseAddressModifyEnable = true,
                  .IndirectObjectBufferSize = 0xfffff,
                  .IndirectObjectBufferSizeModifyEnable = true,

                  .InstructionBaseAddress = { &device->instruction_block_pool.bo, 0 },
                  .InstructionMemoryObjectControlState = GEN8_MOCS,
                  .InstructionBaseAddressModifyEnable = true,
                  .InstructionBufferSize = 0xfffff,
                  .InstructionBuffersizeModifyEnable = true);
}
2260
2261VkResult anv_BeginCommandBuffer(
2262 VkCmdBuffer cmdBuffer,
2263 const VkCmdBufferBeginInfo* pBeginInfo)
2264{
2265 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2266
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002267 anv_cmd_buffer_emit_state_base_address(cmd_buffer);
Kristian Høgsberg Kristensen7637b022015-06-11 15:21:49 -07002268 cmd_buffer->current_pipeline = UINT32_MAX;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002269
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002270 return VK_SUCCESS;
2271}
2272
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002273static VkResult
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002274anv_cmd_buffer_add_bo(struct anv_cmd_buffer *cmd_buffer,
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002275 struct anv_bo *bo,
2276 struct drm_i915_gem_relocation_entry *relocs,
2277 size_t num_relocs)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002278{
2279 struct drm_i915_gem_exec_object2 *obj;
2280
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002281 if (bo->index < cmd_buffer->bo_count &&
2282 cmd_buffer->exec2_bos[bo->index] == bo)
2283 return VK_SUCCESS;
2284
2285 if (cmd_buffer->bo_count >= cmd_buffer->exec2_array_length) {
2286 uint32_t new_len = cmd_buffer->exec2_objects ?
2287 cmd_buffer->exec2_array_length * 2 : 64;
2288
2289 struct drm_i915_gem_exec_object2 *new_objects =
2290 anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_objects),
2291 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
2292 if (new_objects == NULL)
2293 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2294
2295 struct anv_bo **new_bos =
2296 anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_bos),
2297 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
2298 if (new_objects == NULL) {
2299 anv_device_free(cmd_buffer->device, new_objects);
2300 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2301 }
2302
2303 if (cmd_buffer->exec2_objects) {
2304 memcpy(new_objects, cmd_buffer->exec2_objects,
2305 cmd_buffer->bo_count * sizeof(*new_objects));
2306 memcpy(new_bos, cmd_buffer->exec2_bos,
2307 cmd_buffer->bo_count * sizeof(*new_bos));
2308 }
2309
2310 cmd_buffer->exec2_objects = new_objects;
2311 cmd_buffer->exec2_bos = new_bos;
2312 cmd_buffer->exec2_array_length = new_len;
2313 }
2314
2315 assert(cmd_buffer->bo_count < cmd_buffer->exec2_array_length);
2316
2317 bo->index = cmd_buffer->bo_count++;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002318 obj = &cmd_buffer->exec2_objects[bo->index];
2319 cmd_buffer->exec2_bos[bo->index] = bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002320
2321 obj->handle = bo->gem_handle;
2322 obj->relocation_count = 0;
2323 obj->relocs_ptr = 0;
2324 obj->alignment = 0;
2325 obj->offset = bo->offset;
2326 obj->flags = 0;
2327 obj->rsvd1 = 0;
2328 obj->rsvd2 = 0;
2329
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002330 if (relocs) {
2331 obj->relocation_count = num_relocs;
2332 obj->relocs_ptr = (uintptr_t) relocs;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002333 }
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002334
2335 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002336}
2337
2338static void
2339anv_cmd_buffer_add_validate_bos(struct anv_cmd_buffer *cmd_buffer,
2340 struct anv_reloc_list *list)
2341{
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002342 for (size_t i = 0; i < list->num_relocs; i++)
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002343 anv_cmd_buffer_add_bo(cmd_buffer, list->reloc_bos[i], NULL, 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002344}
2345
2346static void
2347anv_cmd_buffer_process_relocs(struct anv_cmd_buffer *cmd_buffer,
2348 struct anv_reloc_list *list)
2349{
2350 struct anv_bo *bo;
2351
2352 /* If the kernel supports I915_EXEC_NO_RELOC, it will compare offset in
2353 * struct drm_i915_gem_exec_object2 against the bos current offset and if
2354 * all bos haven't moved it will skip relocation processing alltogether.
2355 * If I915_EXEC_NO_RELOC is not supported, the kernel ignores the incoming
2356 * value of offset so we can set it either way. For that to work we need
2357 * to make sure all relocs use the same presumed offset.
2358 */
2359
2360 for (size_t i = 0; i < list->num_relocs; i++) {
2361 bo = list->reloc_bos[i];
2362 if (bo->offset != list->relocs[i].presumed_offset)
2363 cmd_buffer->need_reloc = true;
2364
2365 list->relocs[i].target_handle = bo->index;
2366 }
2367}
2368
/* Finish recording: terminate the batch, then build the execbuf2 object
 * list and relocation state so the command buffer is ready for submission.
 *
 * Ordering matters here: surface-state bos are added first (with their
 * relocs), then everything they reference, then all batch bos except the
 * first, then everything the batches reference, and finally the first
 * batch bo — the kernel requires the batch to be the last object in the
 * exec list.
 */
VkResult anv_EndCommandBuffer(
    VkCmdBuffer                                 cmdBuffer)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_device *device = cmd_buffer->device;
   struct anv_batch *batch = &cmd_buffer->batch;

   anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_END);

   /* Round batch up to an even number of dwords: the length is always a
    * multiple of 4 bytes, so bit 2 set means an odd dword count and we
    * pad with a single MI_NOOP. */
   if ((batch->next - batch->start) & 4)
      anv_batch_emit(batch, GEN8_MI_NOOP);

   anv_batch_bo_finish(cmd_buffer->last_batch_bo, &cmd_buffer->batch);
   /* Close out the current surface bo: record how many relocs and bytes
    * belong to it. */
   cmd_buffer->surface_batch_bo->num_relocs =
      cmd_buffer->surface_relocs.num_relocs - cmd_buffer->surface_batch_bo->first_reloc;
   cmd_buffer->surface_batch_bo->length = cmd_buffer->surface_next;

   cmd_buffer->bo_count = 0;
   cmd_buffer->need_reloc = false;

   /* Lock for access to bo->index. */
   pthread_mutex_lock(&device->mutex);

   /* Add surface state bos first so we can add them with their relocs. */
   for (struct anv_batch_bo *bbo = cmd_buffer->surface_batch_bo;
        bbo != NULL; bbo = bbo->prev_batch_bo) {
      anv_cmd_buffer_add_bo(cmd_buffer, &bbo->bo,
                            &cmd_buffer->surface_relocs.relocs[bbo->first_reloc],
                            bbo->num_relocs);
   }

   /* Add all of the BOs referenced by surface state */
   anv_cmd_buffer_add_validate_bos(cmd_buffer, &cmd_buffer->surface_relocs);

   /* Add all but the first batch BO */
   struct anv_batch_bo *batch_bo = cmd_buffer->last_batch_bo;
   while (batch_bo->prev_batch_bo) {
      anv_cmd_buffer_add_bo(cmd_buffer, &batch_bo->bo,
                            &batch->relocs.relocs[batch_bo->first_reloc],
                            batch_bo->num_relocs);
      batch_bo = batch_bo->prev_batch_bo;
   }

   /* Add everything referenced by the batches */
   anv_cmd_buffer_add_validate_bos(cmd_buffer, &batch->relocs);

   /* Add the first batch bo last: the kernel expects the batch to be the
    * final object in the exec list. */
   assert(batch_bo->prev_batch_bo == NULL && batch_bo->first_reloc == 0);
   anv_cmd_buffer_add_bo(cmd_buffer, &batch_bo->bo,
                         &batch->relocs.relocs[batch_bo->first_reloc],
                         batch_bo->num_relocs);
   assert(batch_bo->bo.index == cmd_buffer->bo_count - 1);

   /* Now that every bo has its final index, patch target handles and
    * detect whether any presumed offsets went stale. */
   anv_cmd_buffer_process_relocs(cmd_buffer, &cmd_buffer->surface_relocs);
   anv_cmd_buffer_process_relocs(cmd_buffer, &batch->relocs);

   cmd_buffer->execbuf.buffers_ptr = (uintptr_t) cmd_buffer->exec2_objects;
   cmd_buffer->execbuf.buffer_count = cmd_buffer->bo_count;
   cmd_buffer->execbuf.batch_start_offset = 0;
   cmd_buffer->execbuf.batch_len = batch->next - batch->start;
   cmd_buffer->execbuf.cliprects_ptr = 0;
   cmd_buffer->execbuf.num_cliprects = 0;
   cmd_buffer->execbuf.DR1 = 0;
   cmd_buffer->execbuf.DR4 = 0;

   cmd_buffer->execbuf.flags = I915_EXEC_HANDLE_LUT;
   /* Only ask the kernel to skip relocations if no bo moved. */
   if (!cmd_buffer->need_reloc)
      cmd_buffer->execbuf.flags |= I915_EXEC_NO_RELOC;
   cmd_buffer->execbuf.flags |= I915_EXEC_RENDER;
   cmd_buffer->execbuf.rsvd1 = device->context_id;
   cmd_buffer->execbuf.rsvd2 = 0;

   pthread_mutex_unlock(&device->mutex);

   return VK_SUCCESS;
}
2446
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002447VkResult anv_ResetCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002448 VkCmdBuffer cmdBuffer)
2449{
2450 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2451
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002452 /* Delete all but the first batch bo */
2453 while (cmd_buffer->last_batch_bo->prev_batch_bo) {
2454 struct anv_batch_bo *prev = cmd_buffer->last_batch_bo->prev_batch_bo;
2455 anv_batch_bo_destroy(cmd_buffer->last_batch_bo, cmd_buffer->device);
2456 cmd_buffer->last_batch_bo = prev;
2457 }
2458 assert(cmd_buffer->last_batch_bo->prev_batch_bo == NULL);
2459
2460 cmd_buffer->batch.relocs.num_relocs = 0;
2461 anv_batch_bo_start(cmd_buffer->last_batch_bo, &cmd_buffer->batch,
2462 GEN8_MI_BATCH_BUFFER_START_length * 4);
2463
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002464 /* Delete all but the first batch bo */
2465 while (cmd_buffer->surface_batch_bo->prev_batch_bo) {
2466 struct anv_batch_bo *prev = cmd_buffer->surface_batch_bo->prev_batch_bo;
2467 anv_batch_bo_destroy(cmd_buffer->surface_batch_bo, cmd_buffer->device);
2468 cmd_buffer->surface_batch_bo = prev;
2469 }
2470 assert(cmd_buffer->surface_batch_bo->prev_batch_bo == NULL);
2471
2472 cmd_buffer->surface_next = 1;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002473 cmd_buffer->surface_relocs.num_relocs = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002474
Jason Ekstrand5d4b6a02015-06-09 16:27:55 -07002475 cmd_buffer->rs_state = NULL;
2476 cmd_buffer->vp_state = NULL;
2477 cmd_buffer->cb_state = NULL;
2478 cmd_buffer->ds_state = NULL;
2479
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002480 return VK_SUCCESS;
2481}
2482
2483// Command buffer building functions
2484
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002485void anv_CmdBindPipeline(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002486 VkCmdBuffer cmdBuffer,
2487 VkPipelineBindPoint pipelineBindPoint,
2488 VkPipeline _pipeline)
2489{
2490 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002491 struct anv_pipeline *pipeline = (struct anv_pipeline *) _pipeline;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002492
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002493 switch (pipelineBindPoint) {
2494 case VK_PIPELINE_BIND_POINT_COMPUTE:
2495 cmd_buffer->compute_pipeline = pipeline;
2496 cmd_buffer->compute_dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
2497 break;
2498
2499 case VK_PIPELINE_BIND_POINT_GRAPHICS:
2500 cmd_buffer->pipeline = pipeline;
2501 cmd_buffer->vb_dirty |= pipeline->vb_used;
2502 cmd_buffer->dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
2503 break;
2504
2505 default:
2506 assert(!"invalid bind point");
2507 break;
2508 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002509}
2510
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002511void anv_CmdBindDynamicStateObject(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002512 VkCmdBuffer cmdBuffer,
2513 VkStateBindPoint stateBindPoint,
2514 VkDynamicStateObject dynamicState)
2515{
2516 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002517
2518 switch (stateBindPoint) {
2519 case VK_STATE_BIND_POINT_VIEWPORT:
Kristian Høgsberg Kristensene7edde62015-06-11 15:04:09 -07002520 cmd_buffer->vp_state = (struct anv_dynamic_vp_state *) dynamicState;
2521 cmd_buffer->dirty |= ANV_CMD_BUFFER_VP_DIRTY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002522 break;
2523 case VK_STATE_BIND_POINT_RASTER:
2524 cmd_buffer->rs_state = (struct anv_dynamic_rs_state *) dynamicState;
2525 cmd_buffer->dirty |= ANV_CMD_BUFFER_RS_DIRTY;
2526 break;
2527 case VK_STATE_BIND_POINT_COLOR_BLEND:
Kristian Høgsberga1d30f82015-05-26 17:12:18 -07002528 cmd_buffer->cb_state = (struct anv_dynamic_cb_state *) dynamicState;
2529 cmd_buffer->dirty |= ANV_CMD_BUFFER_CB_DIRTY;
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002530 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002531 case VK_STATE_BIND_POINT_DEPTH_STENCIL:
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002532 cmd_buffer->ds_state = (struct anv_dynamic_ds_state *) dynamicState;
2533 cmd_buffer->dirty |= ANV_CMD_BUFFER_DS_DIRTY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002534 break;
2535 default:
2536 break;
2537 };
2538}
2539
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002540static struct anv_state
2541anv_cmd_buffer_alloc_surface_state(struct anv_cmd_buffer *cmd_buffer,
2542 uint32_t size, uint32_t alignment)
2543{
2544 struct anv_state state;
2545
Chad Versace55752fe2015-06-26 15:07:59 -07002546 state.offset = align_u32(cmd_buffer->surface_next, alignment);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002547 if (state.offset + size > cmd_buffer->surface_batch_bo->bo.size)
2548 return (struct anv_state) { 0 };
2549
2550 state.map = cmd_buffer->surface_batch_bo->bo.map + state.offset;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002551 state.alloc_size = size;
2552 cmd_buffer->surface_next = state.offset + size;
2553
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002554 assert(state.offset + size <= cmd_buffer->surface_batch_bo->bo.size);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002555
2556 return state;
2557}
2558
/* Swap in a fresh surface state bo once the current one fills up.
 *
 * Finishes bookkeeping on the old bo, links the new bo at the head of the
 * chain, re-emits STATE_BASE_ADDRESS so SurfaceStateBaseAddress points at
 * the new bo, and invalidates the texture cache.  Returns VK_SUCCESS or
 * the error from anv_batch_bo_create (old state is untouched on failure).
 */
static VkResult
anv_cmd_buffer_new_surface_state_bo(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_batch_bo *new_bbo, *old_bbo = cmd_buffer->surface_batch_bo;

   /* Finish off the old buffer: record which slice of the shared reloc
    * list and how many bytes belong to it. */
   old_bbo->num_relocs =
      cmd_buffer->surface_relocs.num_relocs - old_bbo->first_reloc;
   old_bbo->length = cmd_buffer->surface_next;

   VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
   if (result != VK_SUCCESS)
      return result;

   new_bbo->first_reloc = cmd_buffer->surface_relocs.num_relocs;
   /* Offset 0 is reserved, so allocation restarts at 1. */
   cmd_buffer->surface_next = 1;

   new_bbo->prev_batch_bo = old_bbo;
   cmd_buffer->surface_batch_bo = new_bbo;

   /* Re-emit state base addresses so we get the new surface state base
    * address before we start emitting binding tables etc.
    */
   anv_cmd_buffer_emit_state_base_address(cmd_buffer);

   /* It seems like just changing the state base addresses isn't enough.
    * Invalidating the cache seems to be enough to cause things to
    * propagate. However, I'm not 100% sure what we're supposed to do.
    */
   anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
                  .TextureCacheInvalidationEnable = true);

   return VK_SUCCESS;
}
2593
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002594void anv_CmdBindDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002595 VkCmdBuffer cmdBuffer,
2596 VkPipelineBindPoint pipelineBindPoint,
Jason Ekstrand435b0622015-07-07 17:06:10 -07002597 VkPipelineLayout _layout,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002598 uint32_t firstSet,
2599 uint32_t setCount,
2600 const VkDescriptorSet* pDescriptorSets,
2601 uint32_t dynamicOffsetCount,
2602 const uint32_t* pDynamicOffsets)
2603{
2604 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrand435b0622015-07-07 17:06:10 -07002605 struct anv_pipeline_layout *layout = (struct anv_pipeline_layout *) _layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002606 struct anv_descriptor_set *set;
2607 struct anv_descriptor_set_layout *set_layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002608
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002609 assert(firstSet + setCount < MAX_SETS);
2610
2611 uint32_t dynamic_slot = 0;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002612 for (uint32_t i = 0; i < setCount; i++) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002613 set = (struct anv_descriptor_set *) pDescriptorSets[i];
2614 set_layout = layout->set[firstSet + i].layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002615
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002616 cmd_buffer->descriptors[firstSet + i].set = set;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002617
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002618 assert(set_layout->num_dynamic_buffers <
2619 ARRAY_SIZE(cmd_buffer->descriptors[0].dynamic_offsets));
2620 memcpy(cmd_buffer->descriptors[firstSet + i].dynamic_offsets,
2621 pDynamicOffsets + dynamic_slot,
2622 set_layout->num_dynamic_buffers * sizeof(*pDynamicOffsets));
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002623
Jason Ekstrand22513052015-05-30 10:07:29 -07002624 cmd_buffer->descriptors_dirty |= set_layout->shader_stages;
2625
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002626 dynamic_slot += set_layout->num_dynamic_buffers;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002627 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002628}
2629
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002630void anv_CmdBindIndexBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002631 VkCmdBuffer cmdBuffer,
2632 VkBuffer _buffer,
2633 VkDeviceSize offset,
2634 VkIndexType indexType)
2635{
2636 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2637 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
2638
2639 static const uint32_t vk_to_gen_index_type[] = {
Kristian Høgsberg Kristensen5caa4082015-05-31 22:35:11 -07002640 [VK_INDEX_TYPE_UINT16] = INDEX_WORD,
2641 [VK_INDEX_TYPE_UINT32] = INDEX_DWORD,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002642 };
2643
Jason Ekstrand7fbed522015-07-07 15:11:56 -07002644 struct GEN8_3DSTATE_VF vf = {
2645 GEN8_3DSTATE_VF_header,
2646 .CutIndex = (indexType == VK_INDEX_TYPE_UINT16) ? UINT16_MAX : UINT32_MAX,
2647 };
2648 GEN8_3DSTATE_VF_pack(NULL, cmd_buffer->state_vf, &vf);
2649
2650 cmd_buffer->dirty |= ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY;
2651
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002652 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_INDEX_BUFFER,
2653 .IndexFormat = vk_to_gen_index_type[indexType],
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002654 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002655 .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002656 .BufferSize = buffer->size - offset);
2657}
2658
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002659void anv_CmdBindVertexBuffers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002660 VkCmdBuffer cmdBuffer,
2661 uint32_t startBinding,
2662 uint32_t bindingCount,
2663 const VkBuffer* pBuffers,
2664 const VkDeviceSize* pOffsets)
2665{
2666 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002667 struct anv_vertex_binding *vb = cmd_buffer->vertex_bindings;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002668
2669 /* We have to defer setting up vertex buffer since we need the buffer
2670 * stride from the pipeline. */
2671
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002672 assert(startBinding + bindingCount < MAX_VBS);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002673 for (uint32_t i = 0; i < bindingCount; i++) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002674 vb[startBinding + i].buffer = (struct anv_buffer *) pBuffers[i];
2675 vb[startBinding + i].offset = pOffsets[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002676 cmd_buffer->vb_dirty |= 1 << (startBinding + i);
2677 }
2678}
2679
/* Build the binding table for one shader stage.
 *
 * The table starts with render-target surface states (fragment stage
 * only, biased by MAX_RTS) followed by one entry per descriptor surface.
 * Each entry is a copy of the view's SURFACE_STATE with its address
 * patched via the surface reloc list.  On success *bt_state holds the
 * table; returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the surface bo is
 * full so the caller can swap in a new one and retry.
 */
static VkResult
cmd_buffer_emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
                              unsigned stage, struct anv_state *bt_state)
{
   struct anv_pipeline_layout *layout;
   uint32_t color_attachments, bias, size;

   if (stage == VK_SHADER_STAGE_COMPUTE)
      layout = cmd_buffer->compute_pipeline->layout;
   else
      layout = cmd_buffer->pipeline->layout;

   if (stage == VK_SHADER_STAGE_FRAGMENT) {
      /* Render targets occupy the first MAX_RTS slots of the FS table. */
      bias = MAX_RTS;
      color_attachments = cmd_buffer->framebuffer->color_attachment_count;
   } else {
      bias = 0;
      color_attachments = 0;
   }

   /* This is a little awkward: layout can be NULL but we still have to
    * allocate and set a binding table for the PS stage for render
    * targets. */
   uint32_t surface_count = layout ? layout->stage[stage].surface_count : 0;

   if (color_attachments + surface_count == 0)
      return VK_SUCCESS;

   size = (bias + surface_count) * sizeof(uint32_t);
   *bt_state = anv_cmd_buffer_alloc_surface_state(cmd_buffer, size, 32);
   uint32_t *bt_map = bt_state->map;

   if (bt_state->map == NULL)
      return VK_ERROR_OUT_OF_DEVICE_MEMORY;

   /* Fill the render-target slots with copies of each attachment's
    * surface state, patching in the relocated address. */
   for (uint32_t ca = 0; ca < color_attachments; ca++) {
      const struct anv_surface_view *view =
         cmd_buffer->framebuffer->color_attachments[ca];

      struct anv_state state =
         anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);

      if (state.map == NULL)
         return VK_ERROR_OUT_OF_DEVICE_MEMORY;

      memcpy(state.map, view->surface_state.map, 64);

      /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
      *(uint64_t *)(state.map + 8 * 4) =
         anv_reloc_list_add(&cmd_buffer->surface_relocs,
                            cmd_buffer->device,
                            state.offset + 8 * 4,
                            view->bo, view->offset);

      bt_map[ca] = state.offset;
   }

   if (layout == NULL)
      return VK_SUCCESS;

   /* Now the descriptor surfaces, one run per bound set. */
   for (uint32_t set = 0; set < layout->num_sets; set++) {
      struct anv_descriptor_set_binding *d = &cmd_buffer->descriptors[set];
      struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;
      struct anv_descriptor_slot *surface_slots =
         set_layout->stage[stage].surface_start;

      uint32_t start = bias + layout->set[set].surface_start[stage];

      for (uint32_t b = 0; b < set_layout->stage[stage].surface_count; b++) {
         struct anv_surface_view *view =
            d->set->descriptors[surface_slots[b].index].view;

         /* Unwritten descriptor slots are skipped, leaving their table
          * entry untouched. */
         if (!view)
            continue;

         struct anv_state state =
            anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);

         if (state.map == NULL)
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;

         uint32_t offset;
         if (surface_slots[b].dynamic_slot >= 0) {
            /* Dynamic buffers get a freshly built surface state whose
             * base and range account for the bound dynamic offset. */
            uint32_t dynamic_offset =
               d->dynamic_offsets[surface_slots[b].dynamic_slot];

            offset = view->offset + dynamic_offset;
            fill_buffer_surface_state(state.map, view->format, offset,
                                      view->range - dynamic_offset);
         } else {
            offset = view->offset;
            memcpy(state.map, view->surface_state.map, 64);
         }

         /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
         *(uint64_t *)(state.map + 8 * 4) =
            anv_reloc_list_add(&cmd_buffer->surface_relocs,
                               cmd_buffer->device,
                               state.offset + 8 * 4,
                               view->bo, offset);

         bt_map[start + b] = state.offset;
      }
   }

   return VK_SUCCESS;
}
2787
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002788static VkResult
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002789cmd_buffer_emit_samplers(struct anv_cmd_buffer *cmd_buffer,
2790 unsigned stage, struct anv_state *state)
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002791{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002792 struct anv_pipeline_layout *layout;
2793 uint32_t sampler_count;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002794
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002795 if (stage == VK_SHADER_STAGE_COMPUTE)
2796 layout = cmd_buffer->compute_pipeline->layout;
2797 else
2798 layout = cmd_buffer->pipeline->layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002799
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002800 sampler_count = layout ? layout->stage[stage].sampler_count : 0;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002801 if (sampler_count == 0)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002802 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002803
2804 uint32_t size = sampler_count * 16;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002805 *state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream, size, 32);
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002806
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002807 if (state->map == NULL)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002808 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2809
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002810 for (uint32_t set = 0; set < layout->num_sets; set++) {
2811 struct anv_descriptor_set_binding *d = &cmd_buffer->descriptors[set];
2812 struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;
2813 struct anv_descriptor_slot *sampler_slots =
2814 set_layout->stage[stage].sampler_start;
2815
2816 uint32_t start = layout->set[set].sampler_start[stage];
2817
2818 for (uint32_t b = 0; b < set_layout->stage[stage].sampler_count; b++) {
2819 struct anv_sampler *sampler =
2820 d->set->descriptors[sampler_slots[b].index].sampler;
2821
2822 if (!sampler)
2823 continue;
2824
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002825 memcpy(state->map + (start + b) * 16,
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002826 sampler->state, sizeof(sampler->state));
2827 }
2828 }
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002829
2830 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002831}
2832
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002833static VkResult
2834flush_descriptor_set(struct anv_cmd_buffer *cmd_buffer, uint32_t stage)
2835{
2836 struct anv_state surfaces = { 0, }, samplers = { 0, };
2837 VkResult result;
2838
2839 result = cmd_buffer_emit_samplers(cmd_buffer, stage, &samplers);
2840 if (result != VK_SUCCESS)
2841 return result;
2842 result = cmd_buffer_emit_binding_table(cmd_buffer, stage, &surfaces);
2843 if (result != VK_SUCCESS)
2844 return result;
2845
2846 static const uint32_t sampler_state_opcodes[] = {
2847 [VK_SHADER_STAGE_VERTEX] = 43,
2848 [VK_SHADER_STAGE_TESS_CONTROL] = 44, /* HS */
2849 [VK_SHADER_STAGE_TESS_EVALUATION] = 45, /* DS */
2850 [VK_SHADER_STAGE_GEOMETRY] = 46,
2851 [VK_SHADER_STAGE_FRAGMENT] = 47,
2852 [VK_SHADER_STAGE_COMPUTE] = 0,
2853 };
2854
2855 static const uint32_t binding_table_opcodes[] = {
2856 [VK_SHADER_STAGE_VERTEX] = 38,
2857 [VK_SHADER_STAGE_TESS_CONTROL] = 39,
2858 [VK_SHADER_STAGE_TESS_EVALUATION] = 40,
2859 [VK_SHADER_STAGE_GEOMETRY] = 41,
2860 [VK_SHADER_STAGE_FRAGMENT] = 42,
2861 [VK_SHADER_STAGE_COMPUTE] = 0,
2862 };
2863
2864 if (samplers.alloc_size > 0) {
2865 anv_batch_emit(&cmd_buffer->batch,
2866 GEN8_3DSTATE_SAMPLER_STATE_POINTERS_VS,
2867 ._3DCommandSubOpcode = sampler_state_opcodes[stage],
2868 .PointertoVSSamplerState = samplers.offset);
2869 }
2870
2871 if (surfaces.alloc_size > 0) {
2872 anv_batch_emit(&cmd_buffer->batch,
2873 GEN8_3DSTATE_BINDING_TABLE_POINTERS_VS,
2874 ._3DCommandSubOpcode = binding_table_opcodes[stage],
2875 .PointertoVSBindingTable = surfaces.offset);
2876 }
2877
2878 return VK_SUCCESS;
2879}
2880
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002881static void
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002882flush_descriptor_sets(struct anv_cmd_buffer *cmd_buffer)
2883{
Jason Ekstrand22513052015-05-30 10:07:29 -07002884 uint32_t s, dirty = cmd_buffer->descriptors_dirty &
2885 cmd_buffer->pipeline->active_stages;
2886
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002887 VkResult result;
Jason Ekstrand22513052015-05-30 10:07:29 -07002888 for_each_bit(s, dirty) {
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002889 result = flush_descriptor_set(cmd_buffer, s);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002890 if (result != VK_SUCCESS)
2891 break;
2892 }
2893
2894 if (result != VK_SUCCESS) {
2895 assert(result == VK_ERROR_OUT_OF_DEVICE_MEMORY);
2896
2897 result = anv_cmd_buffer_new_surface_state_bo(cmd_buffer);
2898 assert(result == VK_SUCCESS);
2899
Jason Ekstrand22513052015-05-30 10:07:29 -07002900 /* Re-emit all active binding tables */
2901 for_each_bit(s, cmd_buffer->pipeline->active_stages) {
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002902 result = flush_descriptor_set(cmd_buffer, s);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002903
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002904 /* It had better succeed this time */
2905 assert(result == VK_SUCCESS);
2906 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002907 }
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002908
Jason Ekstrand22513052015-05-30 10:07:29 -07002909 cmd_buffer->descriptors_dirty &= ~cmd_buffer->pipeline->active_stages;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002910}
2911
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002912static struct anv_state
2913anv_cmd_buffer_emit_dynamic(struct anv_cmd_buffer *cmd_buffer,
2914 uint32_t *a, uint32_t dwords, uint32_t alignment)
2915{
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002916 struct anv_state state;
2917
Jason Ekstrandce002332015-06-05 17:14:41 -07002918 state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
2919 dwords * 4, alignment);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002920 memcpy(state.map, a, dwords * 4);
2921
Jason Ekstrand9cae3d12015-06-09 21:36:12 -07002922 VG(VALGRIND_CHECK_MEM_IS_DEFINED(state.map, dwords * 4));
2923
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002924 return state;
2925}
2926
2927static struct anv_state
2928anv_cmd_buffer_merge_dynamic(struct anv_cmd_buffer *cmd_buffer,
Jason Ekstrandce002332015-06-05 17:14:41 -07002929 uint32_t *a, uint32_t *b,
2930 uint32_t dwords, uint32_t alignment)
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002931{
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002932 struct anv_state state;
2933 uint32_t *p;
2934
Jason Ekstrandce002332015-06-05 17:14:41 -07002935 state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
2936 dwords * 4, alignment);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002937 p = state.map;
2938 for (uint32_t i = 0; i < dwords; i++)
2939 p[i] = a[i] | b[i];
2940
Jason Ekstrand9cae3d12015-06-09 21:36:12 -07002941 VG(VALGRIND_CHECK_MEM_IS_DEFINED(p, dwords * 4));
2942
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002943 return state;
2944}
2945
/* Emit the compute interface descriptor for the bound compute pipeline.
 *
 * Flushes the compute stage's samplers and binding table into surface
 * state, packs a GEN8 INTERFACE_DESCRIPTOR_DATA that points at them, and
 * emits MEDIA_INTERFACE_DESCRIPTOR_LOAD so the GPGPU walker can find it.
 *
 * Returns VK_SUCCESS, or propagates the failure from the sampler /
 * binding-table emission (e.g. when the surface state bo runs out of
 * space, which the caller handles by allocating a new bo and retrying).
 */
static VkResult
flush_compute_descriptor_set(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_device *device = cmd_buffer->device;
   struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
   struct anv_state surfaces = { 0, }, samplers = { 0, };
   VkResult result;

   result = cmd_buffer_emit_samplers(cmd_buffer,
                                     VK_SHADER_STAGE_COMPUTE, &samplers);
   if (result != VK_SUCCESS)
      return result;
   result = cmd_buffer_emit_binding_table(cmd_buffer,
                                          VK_SHADER_STAGE_COMPUTE, &surfaces);
   if (result != VK_SUCCESS)
      return result;

   struct GEN8_INTERFACE_DESCRIPTOR_DATA desc = {
      .KernelStartPointer = pipeline->cs_simd,
      .KernelStartPointerHigh = 0,
      .BindingTablePointer = surfaces.offset,
      .BindingTableEntryCount = 0,
      .SamplerStatePointer = samplers.offset,
      .SamplerCount = 0,
      .NumberofThreadsinGPGPUThreadGroup = 0 /* FIXME: Really? */
   };

   /* The descriptor itself lives in the dynamic state pool; only its
    * offset is referenced by the MEDIA_INTERFACE_DESCRIPTOR_LOAD below. */
   uint32_t size = GEN8_INTERFACE_DESCRIPTOR_DATA_length * sizeof(uint32_t);
   struct anv_state state =
      anv_state_pool_alloc(&device->dynamic_state_pool, size, 64);

   GEN8_INTERFACE_DESCRIPTOR_DATA_pack(NULL, state.map, &desc);

   anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_INTERFACE_DESCRIPTOR_LOAD,
                  .InterfaceDescriptorTotalLength = size,
                  .InterfaceDescriptorDataStartAddress = state.offset);

   return VK_SUCCESS;
}
2985
2986static void
2987anv_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer)
2988{
2989 struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
2990 VkResult result;
2991
2992 assert(pipeline->active_stages == VK_SHADER_STAGE_COMPUTE_BIT);
2993
2994 if (cmd_buffer->current_pipeline != GPGPU) {
2995 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPELINE_SELECT,
2996 .PipelineSelection = GPGPU);
2997 cmd_buffer->current_pipeline = GPGPU;
2998 }
2999
3000 if (cmd_buffer->compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
3001 anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);
3002
3003 if ((cmd_buffer->descriptors_dirty & VK_SHADER_STAGE_COMPUTE_BIT) ||
3004 (cmd_buffer->compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)) {
3005 result = flush_compute_descriptor_set(cmd_buffer);
3006 if (result != VK_SUCCESS) {
3007 result = anv_cmd_buffer_new_surface_state_bo(cmd_buffer);
3008 assert(result == VK_SUCCESS);
3009 result = flush_compute_descriptor_set(cmd_buffer);
3010 assert(result == VK_SUCCESS);
3011 }
3012 cmd_buffer->descriptors_dirty &= ~VK_SHADER_STAGE_COMPUTE;
3013 }
3014
3015 cmd_buffer->compute_dirty = 0;
3016}
3017
/* Flush all dirty 3D state before a draw.
 *
 * Emits, in order: pipeline select (3D), dirty vertex buffers, the
 * pipeline batch (re-emitting STATE_BASE_ADDRESS first if the scratch bo
 * grew), descriptor sets, viewport/scissor pointers, merged rasterizer,
 * depth-stencil, color-calc and vertex-fetch state.  Clears the emitted
 * dirty bits on the way out.  Must not be used for compute (asserted).
 */
static void
anv_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_pipeline *pipeline = cmd_buffer->pipeline;
   uint32_t *p;

   /* Only re-emit vertex buffers that are both dirty and actually used by
    * the current pipeline. */
   uint32_t vb_emit = cmd_buffer->vb_dirty & pipeline->vb_used;

   assert((pipeline->active_stages & VK_SHADER_STAGE_COMPUTE_BIT) == 0);

   if (cmd_buffer->current_pipeline != _3D) {
      anv_batch_emit(&cmd_buffer->batch, GEN8_PIPELINE_SELECT,
                     .PipelineSelection = _3D);
      cmd_buffer->current_pipeline = _3D;
   }

   if (vb_emit) {
      const uint32_t num_buffers = __builtin_popcount(vb_emit);
      /* 1 header dword + 4 dwords of VERTEX_BUFFER_STATE per buffer. */
      const uint32_t num_dwords = 1 + num_buffers * 4;

      p = anv_batch_emitn(&cmd_buffer->batch, num_dwords,
                          GEN8_3DSTATE_VERTEX_BUFFERS);
      uint32_t vb, i = 0;
      for_each_bit(vb, vb_emit) {
         struct anv_buffer *buffer = cmd_buffer->vertex_bindings[vb].buffer;
         uint32_t offset = cmd_buffer->vertex_bindings[vb].offset;

         struct GEN8_VERTEX_BUFFER_STATE state = {
            .VertexBufferIndex = vb,
            .MemoryObjectControlState = GEN8_MOCS,
            .AddressModifyEnable = true,
            .BufferPitch = pipeline->binding_stride[vb],
            .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
            .BufferSize = buffer->size - offset
         };

         GEN8_VERTEX_BUFFER_STATE_pack(&cmd_buffer->batch, &p[1 + i * 4], &state);
         i++;
      }
   }

   if (cmd_buffer->dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY) {
      /* If somebody compiled a pipeline after starting a command buffer the
       * scratch bo may have grown since we started this cmd buffer (and
       * emitted STATE_BASE_ADDRESS).  If we're binding that pipeline now,
       * reemit STATE_BASE_ADDRESS so that we use the bigger scratch bo. */
      if (cmd_buffer->scratch_size < pipeline->total_scratch)
         anv_cmd_buffer_emit_state_base_address(cmd_buffer);

      anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);
   }

   if (cmd_buffer->descriptors_dirty)
      flush_descriptor_sets(cmd_buffer);

   /* Viewport/scissor state lives in the dynamic viewport state object;
    * only the pointers need re-emitting. */
   if (cmd_buffer->dirty & ANV_CMD_BUFFER_VP_DIRTY) {
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_SCISSOR_STATE_POINTERS,
                     .ScissorRectPointer = cmd_buffer->vp_state->scissor.offset);
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_CC,
                     .CCViewportPointer = cmd_buffer->vp_state->cc_vp.offset);
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_SF_CLIP,
                     .SFClipViewportPointer = cmd_buffer->vp_state->sf_clip_vp.offset);
   }

   /* SF/raster packets combine pipeline-packed fields with dynamic
    * rasterizer state via dword-wise OR. */
   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_RS_DIRTY)) {
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->rs_state->state_sf, pipeline->state_sf);
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->rs_state->state_raster, pipeline->state_raster);
   }

   if (cmd_buffer->ds_state &&
       (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)))
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->ds_state->state_wm_depth_stencil,
                           pipeline->state_wm_depth_stencil);

   /* COLOR_CALC_STATE combines color-blend and depth-stencil dynamic state;
    * emit whichever is bound, or the dword-wise merge when both are. */
   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_CB_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)) {
      struct anv_state state;
      if (cmd_buffer->ds_state == NULL)
         state = anv_cmd_buffer_emit_dynamic(cmd_buffer,
                                             cmd_buffer->cb_state->state_color_calc,
                                             GEN8_COLOR_CALC_STATE_length, 64);
      else if (cmd_buffer->cb_state == NULL)
         state = anv_cmd_buffer_emit_dynamic(cmd_buffer,
                                             cmd_buffer->ds_state->state_color_calc,
                                             GEN8_COLOR_CALC_STATE_length, 64);
      else
         state = anv_cmd_buffer_merge_dynamic(cmd_buffer,
                                              cmd_buffer->ds_state->state_color_calc,
                                              cmd_buffer->cb_state->state_color_calc,
                                              GEN8_COLOR_CALC_STATE_length, 64);

      anv_batch_emit(&cmd_buffer->batch,
                     GEN8_3DSTATE_CC_STATE_POINTERS,
                     .ColorCalcStatePointer = state.offset,
                     .ColorCalcStatePointerValid = true);
   }

   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY)) {
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->state_vf, pipeline->state_vf);
   }

   cmd_buffer->vb_dirty &= ~vb_emit;
   cmd_buffer->dirty = 0;
}
3125
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003126void anv_CmdDraw(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003127 VkCmdBuffer cmdBuffer,
3128 uint32_t firstVertex,
3129 uint32_t vertexCount,
3130 uint32_t firstInstance,
3131 uint32_t instanceCount)
3132{
3133 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3134
3135 anv_cmd_buffer_flush_state(cmd_buffer);
3136
3137 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3138 .VertexAccessType = SEQUENTIAL,
3139 .VertexCountPerInstance = vertexCount,
3140 .StartVertexLocation = firstVertex,
3141 .InstanceCount = instanceCount,
3142 .StartInstanceLocation = firstInstance,
3143 .BaseVertexLocation = 0);
3144}
3145
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003146void anv_CmdDrawIndexed(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003147 VkCmdBuffer cmdBuffer,
3148 uint32_t firstIndex,
3149 uint32_t indexCount,
3150 int32_t vertexOffset,
3151 uint32_t firstInstance,
3152 uint32_t instanceCount)
3153{
3154 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3155
3156 anv_cmd_buffer_flush_state(cmd_buffer);
3157
3158 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3159 .VertexAccessType = RANDOM,
3160 .VertexCountPerInstance = indexCount,
3161 .StartVertexLocation = firstIndex,
3162 .InstanceCount = instanceCount,
3163 .StartInstanceLocation = firstInstance,
Kristian Høgsberg Kristensenc8f07852015-06-02 22:35:47 -07003164 .BaseVertexLocation = vertexOffset);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003165}
3166
/* Emit MI_LOAD_REGISTER_MEM: load MMIO register `reg` from memory at
 * `bo` + `offset`.  Used to feed the indirect draw/dispatch registers. */
static void
anv_batch_lrm(struct anv_batch *batch,
              uint32_t reg, struct anv_bo *bo, uint32_t offset)
{
   anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_MEM,
                  .RegisterAddress = reg,
                  .MemoryAddress = { bo, offset });
}
3175
/* Emit MI_LOAD_REGISTER_IMM: write immediate `imm` into MMIO register
 * `reg`. */
static void
anv_batch_lri(struct anv_batch *batch, uint32_t reg, uint32_t imm)
{
   anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_IMM,
                  .RegisterOffset = reg,
                  .DataDWord = imm);
}
3183
/* Auto-Draw / Indirect Registers
 *
 * MMIO registers from which 3DPRIMITIVE fetches its parameters when
 * IndirectParameterEnable is set; they are filled with
 * MI_LOAD_REGISTER_MEM/IMM by the *Indirect draw entry points below.
 */
#define GEN7_3DPRIM_END_OFFSET 0x2420
#define GEN7_3DPRIM_START_VERTEX 0x2430
#define GEN7_3DPRIM_VERTEX_COUNT 0x2434
#define GEN7_3DPRIM_INSTANCE_COUNT 0x2438
#define GEN7_3DPRIM_START_INSTANCE 0x243C
#define GEN7_3DPRIM_BASE_VERTEX 0x2440
3191
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003192void anv_CmdDrawIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003193 VkCmdBuffer cmdBuffer,
3194 VkBuffer _buffer,
3195 VkDeviceSize offset,
3196 uint32_t count,
3197 uint32_t stride)
3198{
3199 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3200 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07003201 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003202 uint32_t bo_offset = buffer->offset + offset;
3203
3204 anv_cmd_buffer_flush_state(cmd_buffer);
3205
3206 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
3207 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
3208 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
3209 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 12);
3210 anv_batch_lri(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, 0);
3211
3212 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3213 .IndirectParameterEnable = true,
3214 .VertexAccessType = SEQUENTIAL);
3215}
3216
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003217void anv_CmdDrawIndexedIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003218 VkCmdBuffer cmdBuffer,
3219 VkBuffer _buffer,
3220 VkDeviceSize offset,
3221 uint32_t count,
3222 uint32_t stride)
3223{
3224 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3225 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07003226 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003227 uint32_t bo_offset = buffer->offset + offset;
3228
3229 anv_cmd_buffer_flush_state(cmd_buffer);
3230
3231 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
3232 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
3233 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
3234 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, bo, bo_offset + 12);
3235 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 16);
3236
3237 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3238 .IndirectParameterEnable = true,
3239 .VertexAccessType = RANDOM);
3240}
3241
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003242void anv_CmdDispatch(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003243 VkCmdBuffer cmdBuffer,
3244 uint32_t x,
3245 uint32_t y,
3246 uint32_t z)
3247{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003248 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003249 struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
3250 struct brw_cs_prog_data *prog_data = &pipeline->cs_prog_data;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003251
3252 anv_cmd_buffer_flush_compute_state(cmd_buffer);
3253
3254 anv_batch_emit(&cmd_buffer->batch, GEN8_GPGPU_WALKER,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003255 .SIMDSize = prog_data->simd_size / 16,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003256 .ThreadDepthCounterMaximum = 0,
3257 .ThreadHeightCounterMaximum = 0,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003258 .ThreadWidthCounterMaximum = pipeline->cs_thread_width_max,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003259 .ThreadGroupIDXDimension = x,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003260 .ThreadGroupIDYDimension = y,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003261 .ThreadGroupIDZDimension = z,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003262 .RightExecutionMask = pipeline->cs_right_mask,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003263 .BottomExecutionMask = 0xffffffff);
3264
3265 anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_STATE_FLUSH);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003266}
3267
/* GPGPU indirect dispatch dimension registers, read by GPGPU_WALKER when
 * IndirectParameterEnable is set; loaded in anv_CmdDispatchIndirect. */
#define GPGPU_DISPATCHDIMX 0x2500
#define GPGPU_DISPATCHDIMY 0x2504
#define GPGPU_DISPATCHDIMZ 0x2508
3271
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003272void anv_CmdDispatchIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003273 VkCmdBuffer cmdBuffer,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003274 VkBuffer _buffer,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003275 VkDeviceSize offset)
3276{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003277 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003278 struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
3279 struct brw_cs_prog_data *prog_data = &pipeline->cs_prog_data;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003280 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
3281 struct anv_bo *bo = buffer->bo;
3282 uint32_t bo_offset = buffer->offset + offset;
3283
3284 anv_cmd_buffer_flush_compute_state(cmd_buffer);
3285
3286 anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMX, bo, bo_offset);
3287 anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMY, bo, bo_offset + 4);
3288 anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMZ, bo, bo_offset + 8);
3289
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003290 anv_batch_emit(&cmd_buffer->batch, GEN8_GPGPU_WALKER,
3291 .IndirectParameterEnable = true,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003292 .SIMDSize = prog_data->simd_size / 16,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003293 .ThreadDepthCounterMaximum = 0,
3294 .ThreadHeightCounterMaximum = 0,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003295 .ThreadWidthCounterMaximum = pipeline->cs_thread_width_max,
3296 .RightExecutionMask = pipeline->cs_right_mask,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003297 .BottomExecutionMask = 0xffffffff);
3298
3299 anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_STATE_FLUSH);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003300}
3301
/* vkCmdSetEvent: not yet implemented. */
void anv_CmdSetEvent(
    VkCmdBuffer                                 cmdBuffer,
    VkEvent                                     event,
    VkPipeEvent                                 pipeEvent)
{
   stub();
}
3309
/* vkCmdResetEvent: not yet implemented. */
void anv_CmdResetEvent(
    VkCmdBuffer                                 cmdBuffer,
    VkEvent                                     event,
    VkPipeEvent                                 pipeEvent)
{
   stub();
}
3317
/* vkCmdWaitEvents: not yet implemented. */
void anv_CmdWaitEvents(
    VkCmdBuffer                                 cmdBuffer,
    VkWaitEvent                                 waitEvent,
    uint32_t                                    eventCount,
    const VkEvent*                              pEvents,
    VkPipeEventFlags                            pipeEventMask,
    uint32_t                                    memBarrierCount,
    const void* const*                          ppMemBarriers)
{
   stub();
}
3329
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003330void anv_CmdPipelineBarrier(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003331 VkCmdBuffer cmdBuffer,
3332 VkWaitEvent waitEvent,
Chad Versace18ee32e2015-07-07 15:42:38 -07003333 VkPipeEventFlags pipeEventMask,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003334 uint32_t memBarrierCount,
Chad Versace18ee32e2015-07-07 15:42:38 -07003335 const void* const* ppMemBarriers)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003336{
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003337 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *)cmdBuffer;
3338 uint32_t b, *dw;
3339
3340 struct GEN8_PIPE_CONTROL cmd = {
3341 GEN8_PIPE_CONTROL_header,
3342 .PostSyncOperation = NoWrite,
3343 };
3344
3345 /* XXX: I think waitEvent is a no-op on our HW. We should verify that. */
3346
Chad Versace18ee32e2015-07-07 15:42:38 -07003347 if (anv_clear_mask(&pipeEventMask, VK_PIPE_EVENT_TOP_OF_PIPE_BIT)) {
3348 /* This is just what PIPE_CONTROL does */
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003349 }
3350
Chad Versace18ee32e2015-07-07 15:42:38 -07003351 if (anv_clear_mask(&pipeEventMask,
3352 VK_PIPE_EVENT_VERTEX_PROCESSING_COMPLETE_BIT |
3353 VK_PIPE_EVENT_LOCAL_FRAGMENT_PROCESSING_COMPLETE_BIT |
3354 VK_PIPE_EVENT_FRAGMENT_PROCESSING_COMPLETE_BIT)) {
3355 cmd.StallAtPixelScoreboard = true;
3356 }
3357
3358
3359 if (anv_clear_mask(&pipeEventMask,
3360 VK_PIPE_EVENT_GRAPHICS_PIPELINE_COMPLETE_BIT |
3361 VK_PIPE_EVENT_COMPUTE_PIPELINE_COMPLETE_BIT |
3362 VK_PIPE_EVENT_TRANSFER_COMPLETE_BIT |
3363 VK_PIPE_EVENT_COMMANDS_COMPLETE_BIT)) {
3364 cmd.CommandStreamerStallEnable = true;
3365 }
3366
3367 if (anv_clear_mask(&pipeEventMask, VK_PIPE_EVENT_CPU_SIGNAL_BIT)) {
3368 anv_finishme("VK_PIPE_EVENT_CPU_SIGNAL_BIT");
3369 }
3370
3371 /* We checked all known VkPipeEventFlags. */
3372 anv_assert(pipeEventMask == 0);
3373
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003374 /* XXX: Right now, we're really dumb and just flush whatever categories
3375 * the app asks for. One of these days we may make this a bit better
3376 * but right now that's all the hardware allows for in most areas.
3377 */
3378 VkMemoryOutputFlags out_flags = 0;
3379 VkMemoryInputFlags in_flags = 0;
3380
3381 for (uint32_t i = 0; i < memBarrierCount; i++) {
3382 const struct anv_common *common = ppMemBarriers[i];
3383 switch (common->sType) {
3384 case VK_STRUCTURE_TYPE_MEMORY_BARRIER: {
3385 const VkMemoryBarrier *barrier = (VkMemoryBarrier *)common;
3386 out_flags |= barrier->outputMask;
3387 in_flags |= barrier->inputMask;
3388 break;
3389 }
3390 case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER: {
3391 const VkBufferMemoryBarrier *barrier = (VkBufferMemoryBarrier *)common;
3392 out_flags |= barrier->outputMask;
3393 in_flags |= barrier->inputMask;
3394 break;
3395 }
3396 case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER: {
3397 const VkImageMemoryBarrier *barrier = (VkImageMemoryBarrier *)common;
3398 out_flags |= barrier->outputMask;
3399 in_flags |= barrier->inputMask;
3400 break;
3401 }
3402 default:
3403 unreachable("Invalid memory barrier type");
3404 }
3405 }
3406
3407 for_each_bit(b, out_flags) {
3408 switch ((VkMemoryOutputFlags)(1 << b)) {
Jason Ekstrand2b404e52015-07-06 17:18:25 -07003409 case VK_MEMORY_OUTPUT_HOST_WRITE_BIT:
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003410 break; /* FIXME: Little-core systems */
3411 case VK_MEMORY_OUTPUT_SHADER_WRITE_BIT:
3412 cmd.DCFlushEnable = true;
3413 break;
3414 case VK_MEMORY_OUTPUT_COLOR_ATTACHMENT_BIT:
3415 cmd.RenderTargetCacheFlushEnable = true;
3416 break;
3417 case VK_MEMORY_OUTPUT_DEPTH_STENCIL_ATTACHMENT_BIT:
3418 cmd.DepthCacheFlushEnable = true;
3419 break;
3420 case VK_MEMORY_OUTPUT_TRANSFER_BIT:
3421 cmd.RenderTargetCacheFlushEnable = true;
3422 cmd.DepthCacheFlushEnable = true;
3423 break;
3424 default:
3425 unreachable("Invalid memory output flag");
3426 }
3427 }
3428
3429 for_each_bit(b, out_flags) {
3430 switch ((VkMemoryInputFlags)(1 << b)) {
Jason Ekstrand2b404e52015-07-06 17:18:25 -07003431 case VK_MEMORY_INPUT_HOST_READ_BIT:
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003432 break; /* FIXME: Little-core systems */
3433 case VK_MEMORY_INPUT_INDIRECT_COMMAND_BIT:
3434 case VK_MEMORY_INPUT_INDEX_FETCH_BIT:
3435 case VK_MEMORY_INPUT_VERTEX_ATTRIBUTE_FETCH_BIT:
3436 cmd.VFCacheInvalidationEnable = true;
3437 break;
3438 case VK_MEMORY_INPUT_UNIFORM_READ_BIT:
3439 cmd.ConstantCacheInvalidationEnable = true;
3440 /* fallthrough */
3441 case VK_MEMORY_INPUT_SHADER_READ_BIT:
3442 cmd.DCFlushEnable = true;
3443 cmd.TextureCacheInvalidationEnable = true;
3444 break;
3445 case VK_MEMORY_INPUT_COLOR_ATTACHMENT_BIT:
3446 case VK_MEMORY_INPUT_DEPTH_STENCIL_ATTACHMENT_BIT:
3447 break; /* XXX: Hunh? */
3448 case VK_MEMORY_INPUT_TRANSFER_BIT:
3449 cmd.TextureCacheInvalidationEnable = true;
3450 break;
3451 }
3452 }
3453
3454 dw = anv_batch_emit_dwords(&cmd_buffer->batch, GEN8_PIPE_CONTROL_length);
3455 GEN8_PIPE_CONTROL_pack(&cmd_buffer->batch, dw, &cmd);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003456}
3457
Jason Ekstrand57153da2015-05-22 15:15:08 -07003458static void
3459anv_framebuffer_destroy(struct anv_device *device,
3460 struct anv_object *object,
3461 VkObjectType obj_type)
3462{
3463 struct anv_framebuffer *fb = (struct anv_framebuffer *)object;
3464
3465 assert(obj_type == VK_OBJECT_TYPE_FRAMEBUFFER);
3466
3467 anv_DestroyObject((VkDevice) device,
3468 VK_OBJECT_TYPE_DYNAMIC_VP_STATE,
3469 fb->vp_state);
3470
3471 anv_device_free(device, fb);
3472}
3473
/* vkCreateFramebuffer: allocate a framebuffer and record its attachments.
 *
 * Also creates a framebuffer-sized dynamic viewport/scissor state object
 * (destroyed in anv_framebuffer_destroy).  Returns VK_SUCCESS or
 * VK_ERROR_OUT_OF_HOST_MEMORY.
 */
VkResult anv_CreateFramebuffer(
    VkDevice                                    _device,
    const VkFramebufferCreateInfo*              pCreateInfo,
    VkFramebuffer*                              pFramebuffer)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_framebuffer *framebuffer;

   /* Placeholder used when no depth/stencil attachment is given, so the
    * depth/stencil emit path can run without NULL checks (zero strides
    * disable the depth and stencil buffers). */
   static const struct anv_depth_stencil_view null_view =
      { .depth_format = D16_UNORM, .depth_stride = 0, .stencil_stride = 0 };

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);

   framebuffer = anv_device_alloc(device, sizeof(*framebuffer), 8,
                                  VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (framebuffer == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   framebuffer->base.destructor = anv_framebuffer_destroy;

   framebuffer->color_attachment_count = pCreateInfo->colorAttachmentCount;
   for (uint32_t i = 0; i < pCreateInfo->colorAttachmentCount; i++) {
      framebuffer->color_attachments[i] =
         (struct anv_surface_view *) pCreateInfo->pColorAttachments[i].view;
   }

   if (pCreateInfo->pDepthStencilAttachment) {
      framebuffer->depth_stencil =
         (struct anv_depth_stencil_view *) pCreateInfo->pDepthStencilAttachment->view;
   } else {
      framebuffer->depth_stencil = &null_view;
   }

   framebuffer->sample_count = pCreateInfo->sampleCount;
   framebuffer->width = pCreateInfo->width;
   framebuffer->height = pCreateInfo->height;
   framebuffer->layers = pCreateInfo->layers;

   /* One viewport/scissor covering the whole framebuffer, depth 0..1. */
   anv_CreateDynamicViewportState((VkDevice) device,
      &(VkDynamicVpStateCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO,
         .viewportAndScissorCount = 1,
         .pViewports = (VkViewport[]) {
            {
               .originX = 0,
               .originY = 0,
               .width = pCreateInfo->width,
               .height = pCreateInfo->height,
               .minDepth = 0,
               .maxDepth = 1
            },
         },
         .pScissors = (VkRect2D[]) {
            { { 0, 0 },
              { pCreateInfo->width, pCreateInfo->height } },
         }
      },
      &framebuffer->vp_state);

   *pFramebuffer = (VkFramebuffer) framebuffer;

   return VK_SUCCESS;
}
3537
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003538VkResult anv_CreateRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003539 VkDevice _device,
3540 const VkRenderPassCreateInfo* pCreateInfo,
3541 VkRenderPass* pRenderPass)
3542{
3543 struct anv_device *device = (struct anv_device *) _device;
3544 struct anv_render_pass *pass;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003545 size_t size;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003546
3547 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);
3548
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003549 size = sizeof(*pass) +
3550 pCreateInfo->layers * sizeof(struct anv_render_pass_layer);
3551 pass = anv_device_alloc(device, size, 8,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003552 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
3553 if (pass == NULL)
3554 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
3555
3556 pass->render_area = pCreateInfo->renderArea;
3557
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003558 pass->num_layers = pCreateInfo->layers;
3559
3560 pass->num_clear_layers = 0;
3561 for (uint32_t i = 0; i < pCreateInfo->layers; i++) {
3562 pass->layers[i].color_load_op = pCreateInfo->pColorLoadOps[i];
3563 pass->layers[i].clear_color = pCreateInfo->pColorLoadClearValues[i];
3564 if (pass->layers[i].color_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR)
3565 pass->num_clear_layers++;
3566 }
3567
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003568 *pRenderPass = (VkRenderPass) pass;
3569
3570 return VK_SUCCESS;
3571}
3572
Jason Ekstrand0ff06542015-07-07 17:11:35 -07003573VkResult anv_GetRenderAreaGranularity(
3574 VkDevice device,
3575 VkRenderPass renderPass,
3576 VkExtent2D* pGranularity)
3577{
3578 *pGranularity = (VkExtent2D) { 1, 1 };
3579
3580 return VK_SUCCESS;
3581}
3582
/* Emit the depth/stencil buffer packets for the framebuffer's
 * depth-stencil view.
 *
 * A zero depth/stencil stride (as in the null view installed by
 * anv_CreateFramebuffer) disables the corresponding buffer.  HiZ is
 * disabled and clear params are zeroed.
 */
static void
anv_cmd_buffer_emit_depth_stencil(struct anv_cmd_buffer *cmd_buffer,
                                  struct anv_render_pass *pass)
{
   const struct anv_depth_stencil_view *view =
      cmd_buffer->framebuffer->depth_stencil;

   /* FIXME: Implement the PMA stall W/A */
   /* FIXME: Width and Height are wrong */

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DEPTH_BUFFER,
                  .SurfaceType = SURFTYPE_2D,
                  .DepthWriteEnable = view->depth_stride > 0,
                  .StencilWriteEnable = view->stencil_stride > 0,
                  .HierarchicalDepthBufferEnable = false,
                  .SurfaceFormat = view->depth_format,
                  .SurfacePitch = view->depth_stride > 0 ? view->depth_stride - 1 : 0,
                  .SurfaceBaseAddress = { view->bo, view->depth_offset },
                  .Height = pass->render_area.extent.height - 1,
                  .Width = pass->render_area.extent.width - 1,
                  .LOD = 0,
                  .Depth = 1 - 1,
                  .MinimumArrayElement = 0,
                  .DepthBufferObjectControlState = GEN8_MOCS,
                  .RenderTargetViewExtent = 1 - 1,
                  .SurfaceQPitch = view->depth_qpitch >> 2);

   /* Disable hierarchial depth buffers. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_HIER_DEPTH_BUFFER);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_STENCIL_BUFFER,
                  .StencilBufferEnable = view->stencil_stride > 0,
                  .StencilBufferObjectControlState = GEN8_MOCS,
                  .SurfacePitch = view->stencil_stride > 0 ? view->stencil_stride - 1 : 0,
                  .SurfaceBaseAddress = { view->bo, view->stencil_offset },
                  .SurfaceQPitch = view->stencil_qpitch >> 2);

   /* Clear the clear params. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_CLEAR_PARAMS);
}
3623
/* vkCmdPushConstants: not yet implemented. */
void anv_CmdPushConstants(
    VkCmdBuffer                                 cmdBuffer,
    VkPipelineLayout                            layout,
    VkShaderStageFlags                          stageFlags,
    uint32_t                                    start,
    uint32_t                                    length,
    const void*                                 values)
{
   stub();
}
3634
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003635void anv_CmdBeginRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003636 VkCmdBuffer cmdBuffer,
3637 const VkRenderPassBegin* pRenderPassBegin)
3638{
3639 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3640 struct anv_render_pass *pass = (struct anv_render_pass *) pRenderPassBegin->renderPass;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003641 struct anv_framebuffer *framebuffer =
3642 (struct anv_framebuffer *) pRenderPassBegin->framebuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003643
Jason Ekstrand52940e82015-07-08 10:57:13 -07003644 assert(pRenderPassBegin->contents == VK_RENDER_PASS_CONTENTS_INLINE);
3645
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003646 cmd_buffer->framebuffer = framebuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003647
Jason Ekstrand22513052015-05-30 10:07:29 -07003648 cmd_buffer->descriptors_dirty |= VK_SHADER_STAGE_FRAGMENT_BIT;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07003649
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003650 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DRAWING_RECTANGLE,
3651 .ClippedDrawingRectangleYMin = pass->render_area.offset.y,
3652 .ClippedDrawingRectangleXMin = pass->render_area.offset.x,
3653 .ClippedDrawingRectangleYMax =
3654 pass->render_area.offset.y + pass->render_area.extent.height - 1,
3655 .ClippedDrawingRectangleXMax =
3656 pass->render_area.offset.x + pass->render_area.extent.width - 1,
3657 .DrawingRectangleOriginY = 0,
3658 .DrawingRectangleOriginX = 0);
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003659
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003660 anv_cmd_buffer_emit_depth_stencil(cmd_buffer, pass);
3661
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003662 anv_cmd_buffer_clear(cmd_buffer, pass);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003663}
3664
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003665void anv_CmdEndRenderPass(
Jason Ekstranda35fef12015-07-07 16:22:23 -07003666 VkCmdBuffer cmdBuffer)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003667{
Jason Ekstranda1309c52015-05-13 22:13:05 -07003668 /* Emit a flushing pipe control at the end of a pass. This is kind of a
3669 * hack but it ensures that render targets always actually get written.
3670 * Eventually, we should do flushing based on image format transitions
3671 * or something of that nature.
3672 */
3673 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *)cmdBuffer;
3674 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
3675 .PostSyncOperation = NoWrite,
3676 .RenderTargetCacheFlushEnable = true,
3677 .InstructionCacheInvalidateEnable = true,
3678 .DepthCacheFlushEnable = true,
3679 .VFCacheInvalidationEnable = true,
3680 .TextureCacheInvalidationEnable = true,
3681 .CommandStreamerStallEnable = true);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003682}
Kristian Høgsbergf8866472015-05-15 22:04:15 -07003683
Chad Versacedff32232015-07-07 15:51:55 -07003684void anv_CmdExecuteCommands(
3685 VkCmdBuffer cmdBuffer,
3686 uint32_t cmdBuffersCount,
3687 const VkCmdBuffer* pCmdBuffers)
3688{
3689 stub();
3690}
3691
Kristian Høgsbergf8866472015-05-15 22:04:15 -07003692void vkCmdDbgMarkerBegin(
3693 VkCmdBuffer cmdBuffer,
3694 const char* pMarker)
3695 __attribute__ ((visibility ("default")));
3696
3697void vkCmdDbgMarkerEnd(
3698 VkCmdBuffer cmdBuffer)
3699 __attribute__ ((visibility ("default")));
3700
3701VkResult vkDbgSetObjectTag(
3702 VkDevice device,
3703 VkObject object,
3704 size_t tagSize,
3705 const void* pTag)
3706 __attribute__ ((visibility ("default")));
3707
3708
3709void vkCmdDbgMarkerBegin(
3710 VkCmdBuffer cmdBuffer,
3711 const char* pMarker)
3712{
3713}
3714
3715void vkCmdDbgMarkerEnd(
3716 VkCmdBuffer cmdBuffer)
3717{
3718}
3719
3720VkResult vkDbgSetObjectTag(
3721 VkDevice device,
3722 VkObject object,
3723 size_t tagSize,
3724 const void* pTag)
3725{
3726 return VK_SUCCESS;
3727}