blob: bbe4ff1c87e788e476a2db10602c4f80df451d47 [file] [log] [blame]
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001/*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24#include <assert.h>
25#include <stdbool.h>
26#include <string.h>
27#include <unistd.h>
28#include <fcntl.h>
29
30#include "private.h"
31
/* Read an integer from the environment variable `name`.
 *
 * Returns 0 when the variable is unset.  The value is parsed with strtol
 * base 0, so decimal, octal (leading 0) and hex (leading 0x) spellings
 * are all accepted.
 */
static int
anv_env_get_int(const char *name)
{
   const char *str = getenv(name);

   return str ? strtol(str, NULL, 0) : 0;
}
42
/* Probe the DRM render node at `path` and initialize `device` from it.
 *
 * Opens the node, determines the chipset id (either from the
 * INTEL_DEVID_OVERRIDE environment variable, which also forces no-hw
 * mode, or from the kernel), resolves the name/info tables, and checks
 * for the kernel features the driver depends on.  The fd is used only
 * for probing and is closed on both the success and failure paths.
 *
 * Returns VK_SUCCESS, or VK_ERROR_UNAVAILABLE if the node cannot be
 * opened or any required feature is missing.
 */
static VkResult
fill_physical_device(struct anv_physical_device *device,
                     struct anv_instance *instance,
                     const char *path)
{
   int fd;

   fd = open(path, O_RDWR | O_CLOEXEC);
   if (fd < 0)
      return vk_error(VK_ERROR_UNAVAILABLE);

   device->instance = instance;
   /* NOTE(review): stores the caller's pointer without copying — the
    * string must outlive the physical device; verify against callers. */
   device->path = path;

   device->chipset_id = anv_env_get_int("INTEL_DEVID_OVERRIDE");
   device->no_hw = false;
   if (device->chipset_id) {
      /* INTEL_DEVID_OVERRIDE implies INTEL_NO_HW. */
      device->no_hw = true;
   } else {
      device->chipset_id = anv_gem_get_param(fd, I915_PARAM_CHIPSET_ID);
   }
   if (!device->chipset_id)
      goto fail;

   device->name = brw_get_device_name(device->chipset_id);
   device->info = brw_get_device_info(device->chipset_id, -1);
   if (!device->info)
      goto fail;

   /* Reject the device unless the kernel exposes every feature the
    * driver relies on. */
   if (!anv_gem_get_param(fd, I915_PARAM_HAS_WAIT_TIMEOUT))
      goto fail;

   if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXECBUF2))
      goto fail;

   if (!anv_gem_get_param(fd, I915_PARAM_HAS_LLC))
      goto fail;

   if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_CONSTANTS))
      goto fail;

   close(fd);

   return VK_SUCCESS;

 fail:
   close(fd);

   return vk_error(VK_ERROR_UNAVAILABLE);
}
94
95static void *default_alloc(
96 void* pUserData,
97 size_t size,
98 size_t alignment,
99 VkSystemAllocType allocType)
100{
101 return malloc(size);
102}
103
/* Fallback free callback paired with default_alloc. */
static void default_free(
    void*                                       pUserData,
    void*                                       pMem)
{
   (void)pUserData;

   free(pMem);
}
110
/* Allocation callbacks used for instances created without
 * application-supplied callbacks. */
static const VkAllocCallbacks default_alloc_callbacks = {
   .pUserData = NULL,
   .pfnAlloc = default_alloc,
   .pfnFree = default_free
};
116
/* Create an anv_instance, using the application's allocation callbacks
 * when provided and the malloc-based defaults otherwise.  The callbacks
 * and their user data are captured into the instance so later driver
 * allocations go through them.
 *
 * NOTE(review): pCreateInfo->pAppInfo is dereferenced unconditionally
 * for apiVersion — confirm callers always supply it.
 */
VkResult anv_CreateInstance(
    const VkInstanceCreateInfo*                 pCreateInfo,
    VkInstance*                                 pInstance)
{
   struct anv_instance *instance;
   const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
   void *user_data = NULL;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);

   if (pCreateInfo->pAllocCb) {
      alloc_callbacks = pCreateInfo->pAllocCb;
      user_data = pCreateInfo->pAllocCb->pUserData;
   }
   /* The instance itself is allocated through the chosen callbacks. */
   instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
                                        VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!instance)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   instance->pAllocUserData = alloc_callbacks->pUserData;
   instance->pfnAlloc = alloc_callbacks->pfnAlloc;
   instance->pfnFree = alloc_callbacks->pfnFree;
   instance->apiVersion = pCreateInfo->pAppInfo->apiVersion;
   /* Physical device is probed lazily in anv_EnumeratePhysicalDevices. */
   instance->physicalDeviceCount = 0;

   *pInstance = (VkInstance) instance;

   return VK_SUCCESS;
}
146
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700147VkResult anv_DestroyInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700148 VkInstance _instance)
149{
150 struct anv_instance *instance = (struct anv_instance *) _instance;
151
152 instance->pfnFree(instance->pAllocUserData, instance);
153
154 return VK_SUCCESS;
155}
156
/* Enumerate the (single) physical device.
 *
 * The first call lazily probes /dev/dri/renderD128 and caches the result
 * in the instance.  Follows the Vulkan count/fill convention: at most
 * *pPhysicalDeviceCount handles are written, then the number available
 * is stored back into *pPhysicalDeviceCount.
 *
 * NOTE(review): the render node path is hard-coded, so GPUs on a
 * different minor number will not be found.
 */
VkResult anv_EnumeratePhysicalDevices(
    VkInstance                                  _instance,
    uint32_t*                                   pPhysicalDeviceCount,
    VkPhysicalDevice*                           pPhysicalDevices)
{
   struct anv_instance *instance = (struct anv_instance *) _instance;
   VkResult result;

   if (instance->physicalDeviceCount == 0) {
      result = fill_physical_device(&instance->physicalDevice,
                                    instance, "/dev/dri/renderD128");
      if (result != VK_SUCCESS)
         return result;

      instance->physicalDeviceCount++;
   }

   if (*pPhysicalDeviceCount >= 1)
      pPhysicalDevices[0] = (VkPhysicalDevice) &instance->physicalDevice;
   *pPhysicalDeviceCount = instance->physicalDeviceCount;

   return VK_SUCCESS;
}
180
/* Query information about the physical device.
 *
 * Every case follows the Vulkan size-query convention: *pDataSize is set
 * to the size of the requested struct first, and when pData is NULL only
 * the size is reported.  Unknown infoType values return VK_UNSUPPORTED.
 */
VkResult anv_GetPhysicalDeviceInfo(
    VkPhysicalDevice                            physicalDevice,
    VkPhysicalDeviceInfoType                    infoType,
    size_t*                                     pDataSize,
    void*                                       pData)
{
   struct anv_physical_device *device = (struct anv_physical_device *) physicalDevice;
   VkPhysicalDeviceProperties *properties;
   VkPhysicalDevicePerformance *performance;
   VkPhysicalDeviceQueueProperties *queue_properties;
   VkPhysicalDeviceMemoryProperties *memory_properties;
   VkDisplayPropertiesWSI *display_properties;
   /* NOTE(review): hard-coded timestamp period used to derive
    * timestampFrequency — confirm the per-GPU value. */
   uint64_t ns_per_tick = 80;

   /* Cast so the WSI extension enum values can share this switch. */
   switch ((uint32_t) infoType) {
   case VK_PHYSICAL_DEVICE_INFO_TYPE_PROPERTIES:
      /* Assigned before the NULL check; only dereferenced after it. */
      properties = pData;

      *pDataSize = sizeof(*properties);
      if (pData == NULL)
         return VK_SUCCESS;

      properties->apiVersion = 1;
      properties->driverVersion = 1;
      properties->vendorId = 0x8086;   /* Intel PCI vendor id */
      properties->deviceId = device->chipset_id;
      properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
      strcpy(properties->deviceName, device->name);
      properties->maxInlineMemoryUpdateSize = 0;
      properties->maxBoundDescriptorSets = MAX_SETS;
      properties->maxThreadGroupSize = 512;
      properties->timestampFrequency = 1000 * 1000 * 1000 / ns_per_tick;
      properties->multiColorAttachmentClears = true;
      properties->maxDescriptorSets = 8;
      properties->maxViewports = 16;
      properties->maxColorAttachments = 8;
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_PERFORMANCE:
      performance = pData;

      *pDataSize = sizeof(*performance);
      if (pData == NULL)
         return VK_SUCCESS;

      /* NOTE(review): placeholder performance figures. */
      performance->maxDeviceClock = 1.0;
      performance->aluPerClock = 1.0;
      performance->texPerClock = 1.0;
      performance->primsPerClock = 1.0;
      performance->pixelsPerClock = 1.0;
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PROPERTIES:
      queue_properties = pData;

      *pDataSize = sizeof(*queue_properties);
      if (pData == NULL)
         return VK_SUCCESS;

      queue_properties->queueFlags = 0;
      queue_properties->queueCount = 1;
      queue_properties->supportsTimestamps = true;
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_MEMORY_PROPERTIES:
      memory_properties = pData;

      *pDataSize = sizeof(*memory_properties);
      if (pData == NULL)
         return VK_SUCCESS;

      memory_properties->supportsMigration = false;
      memory_properties->supportsPinning = false;
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI:
      /* Stub: reports a single zero-valued display. */
      anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI");

      *pDataSize = sizeof(*display_properties);
      if (pData == NULL)
         return VK_SUCCESS;

      display_properties = pData;
      display_properties->display = 0;
      display_properties->physicalResolution = (VkExtent2D) { 0, 0 };
      return VK_SUCCESS;

   case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI:
      /* Stub: nothing is written, not even *pDataSize. */
      anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI");
      return VK_SUCCESS;


   default:
      return VK_UNSUPPORTED;
   }

}
278
Jason Ekstrande7acdda2015-07-07 18:51:53 -0700279PFN_vkVoidFunction anv_GetInstanceProcAddr(
280 VkInstance instance,
281 const char* pName)
282{
283 return anv_lookup_entrypoint(pName);
284}
285
286PFN_vkVoidFunction anv_GetDeviceProcAddr(
287 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700288 const char* pName)
289{
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700290 return anv_lookup_entrypoint(pName);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700291}
292
293static void
294parse_debug_flags(struct anv_device *device)
295{
296 const char *debug, *p, *end;
297
298 debug = getenv("INTEL_DEBUG");
299 device->dump_aub = false;
300 if (debug) {
301 for (p = debug; *p; p = end + 1) {
302 end = strchrnul(p, ',');
303 if (end - p == 3 && memcmp(p, "aub", 3) == 0)
304 device->dump_aub = true;
305 if (end - p == 5 && memcmp(p, "no_hw", 5) == 0)
306 device->no_hw = true;
307 if (*end == '\0')
308 break;
309 }
310 }
311}
312
/* Initialize the device's sole queue.
 *
 * completed_serial is a 4-byte counter allocated from the surface state
 * pool; serial 0 means "nothing submitted yet" and the first batch will
 * carry serial 1.  Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if the state
 * allocation fails.
 */
static VkResult
anv_queue_init(struct anv_device *device, struct anv_queue *queue)
{
   queue->device = device;
   queue->pool = &device->surface_state_pool;

   queue->completed_serial = anv_state_pool_alloc(queue->pool, 4, 4);
   if (queue->completed_serial.map == NULL)
      return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);

   *(uint32_t *)queue->completed_serial.map = 0;
   queue->next_serial = 1;

   return VK_SUCCESS;
}
328
/* Tear down a queue initialized by anv_queue_init. */
static void
anv_queue_finish(struct anv_queue *queue)
{
#ifdef HAVE_VALGRIND
   /* This gets torn down with the device so we only need to do this if
    * valgrind is present.
    */
   anv_state_pool_free(queue->pool, queue->completed_serial);
#endif
}
339
/* Upload the table of border colors, indexed by VkBorderColor, into the
 * dynamic state pool with 32-byte alignment.
 *
 * NOTE(review): the pool allocation is not checked for failure before
 * the memcpy.
 */
static void
anv_device_init_border_colors(struct anv_device *device)
{
   static const VkClearColorValue border_colors[] = {
      [VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 0.0 } },
      [VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK] = { .f32 = { 0.0, 0.0, 0.0, 1.0 } },
      [VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE] = { .f32 = { 1.0, 1.0, 1.0, 1.0 } },
      [VK_BORDER_COLOR_INT_TRANSPARENT_BLACK] = { .u32 = { 0, 0, 0, 0 } },
      [VK_BORDER_COLOR_INT_OPAQUE_BLACK] = { .u32 = { 0, 0, 0, 1 } },
      [VK_BORDER_COLOR_INT_OPAQUE_WHITE] = { .u32 = { 1, 1, 1, 1 } },
   };

   device->border_colors =
      anv_state_pool_alloc(&device->dynamic_state_pool,
                           sizeof(border_colors), 32);
   memcpy(device->border_colors.map, border_colors, sizeof(border_colors));
}
357
/* Size in bytes of each batch buffer handed out by the batch BO pool. */
static const uint32_t BATCH_SIZE = 8192;
Jason Ekstrand5ef81f02015-05-25 15:46:48 -0700359
/* Create a logical device: open the render node, create a GEM context,
 * and initialize the driver's pools, compiler, queue, and meta state.
 *
 * On failure only the resources acquired before the failing step are
 * released (see the fail_fd/fail_device labels).
 * NOTE(review): the VkResult from anv_queue_init is ignored, and pool
 * initializations after context creation have no failure path — confirm
 * that is intentional.
 */
VkResult anv_CreateDevice(
    VkPhysicalDevice                            _physicalDevice,
    const VkDeviceCreateInfo*                   pCreateInfo,
    VkDevice*                                   pDevice)
{
   struct anv_physical_device *physicalDevice =
      (struct anv_physical_device *) _physicalDevice;
   struct anv_instance *instance = physicalDevice->instance;
   struct anv_device *device;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);

   device = instance->pfnAlloc(instance->pAllocUserData,
                               sizeof(*device), 8,
                               VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!device)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* no_hw is inherited from the physical device; INTEL_DEBUG=no_hw can
    * raise it further in parse_debug_flags. */
   device->no_hw = physicalDevice->no_hw;
   parse_debug_flags(device);

   device->instance = physicalDevice->instance;
   device->fd = open(physicalDevice->path, O_RDWR | O_CLOEXEC);
   if (device->fd == -1)
      goto fail_device;

   device->context_id = anv_gem_create_context(device);
   if (device->context_id == -1)
      goto fail_fd;

   anv_bo_pool_init(&device->batch_bo_pool, device, BATCH_SIZE);

   anv_block_pool_init(&device->dynamic_state_block_pool, device, 2048);

   anv_state_pool_init(&device->dynamic_state_pool,
                       &device->dynamic_state_block_pool);

   anv_block_pool_init(&device->instruction_block_pool, device, 2048);
   anv_block_pool_init(&device->surface_state_block_pool, device, 2048);

   anv_state_pool_init(&device->surface_state_pool,
                       &device->surface_state_block_pool);

   anv_block_pool_init(&device->scratch_block_pool, device, 0x10000);

   device->info = *physicalDevice->info;

   device->compiler = anv_compiler_create(device);
   device->aub_writer = NULL;

   pthread_mutex_init(&device->mutex, NULL);

   anv_queue_init(device, &device->queue);

   anv_device_init_meta(device);

   anv_device_init_border_colors(device);

   *pDevice = (VkDevice) device;

   return VK_SUCCESS;

 fail_fd:
   close(device->fd);
 fail_device:
   anv_device_free(device, device);

   return vk_error(VK_ERROR_UNAVAILABLE);
}
429
/* Destroy a logical device, tearing down state in roughly the reverse
 * order of anv_CreateDevice.
 *
 * NOTE(review): scratch_block_pool is initialized in anv_CreateDevice
 * but never finished here — confirm whether that is a leak.
 */
VkResult anv_DestroyDevice(
    VkDevice                                    _device)
{
   struct anv_device *device = (struct anv_device *) _device;

   anv_compiler_destroy(device->compiler);

   anv_queue_finish(&device->queue);

   anv_device_finish_meta(device);

#ifdef HAVE_VALGRIND
   /* We only need to free these to prevent valgrind errors.  The backing
    * BO will go away in a couple of lines so we don't actually leak.
    */
   anv_state_pool_free(&device->dynamic_state_pool, device->border_colors);
#endif

   anv_bo_pool_finish(&device->batch_bo_pool);
   anv_block_pool_finish(&device->dynamic_state_block_pool);
   anv_block_pool_finish(&device->instruction_block_pool);
   anv_block_pool_finish(&device->surface_state_block_pool);

   close(device->fd);

   if (device->aub_writer)
      anv_aub_writer_destroy(device->aub_writer);

   anv_device_free(device, device);

   return VK_SUCCESS;
}
462
/* Instance-level extensions advertised by the driver. */
static const VkExtensionProperties global_extensions[] = {
   {
      .extName = "VK_WSI_LunarG",
      .version = 3
   }
};
469
470VkResult anv_GetGlobalExtensionCount(
471 uint32_t* pCount)
472{
473 *pCount = ARRAY_SIZE(global_extensions);
474
475 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700476}
477
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700478
479VkResult anv_GetGlobalExtensionProperties(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700480 uint32_t extensionIndex,
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700481 VkExtensionProperties* pProperties)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700482{
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700483 assert(extensionIndex < ARRAY_SIZE(global_extensions));
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700484
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700485 *pProperties = global_extensions[extensionIndex];
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700486
Jason Ekstrand8e05bbe2015-07-08 10:38:07 -0700487 return VK_SUCCESS;
488}
489
490VkResult anv_GetPhysicalDeviceExtensionCount(
491 VkPhysicalDevice physicalDevice,
492 uint32_t* pCount)
493{
494 /* None supported at this time */
495 *pCount = 0;
496
497 return VK_SUCCESS;
498}
499
500VkResult anv_GetPhysicalDeviceExtensionProperties(
501 VkPhysicalDevice physicalDevice,
502 uint32_t extensionIndex,
503 VkExtensionProperties* pProperties)
504{
505 /* None supported at this time */
506 return vk_error(VK_ERROR_INVALID_EXTENSION);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700507}
508
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700509VkResult anv_EnumerateLayers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700510 VkPhysicalDevice physicalDevice,
511 size_t maxStringSize,
512 size_t* pLayerCount,
513 char* const* pOutLayers,
514 void* pReserved)
515{
516 *pLayerCount = 0;
517
518 return VK_SUCCESS;
519}
520
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700521VkResult anv_GetDeviceQueue(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700522 VkDevice _device,
523 uint32_t queueNodeIndex,
524 uint32_t queueIndex,
525 VkQueue* pQueue)
526{
527 struct anv_device *device = (struct anv_device *) _device;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700528
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700529 assert(queueIndex == 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700530
Jason Ekstrand66b00d52015-06-09 12:28:58 -0700531 *pQueue = (VkQueue) &device->queue;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700532
533 return VK_SUCCESS;
534}
535
Jason Ekstrand59def432015-05-27 11:41:28 -0700536VkResult
Jason Ekstrand403266b2015-05-25 17:38:15 -0700537anv_reloc_list_init(struct anv_reloc_list *list, struct anv_device *device)
538{
539 list->num_relocs = 0;
540 list->array_length = 256;
541 list->relocs =
542 anv_device_alloc(device, list->array_length * sizeof(*list->relocs), 8,
543 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
544
545 if (list->relocs == NULL)
546 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
547
548 list->reloc_bos =
549 anv_device_alloc(device, list->array_length * sizeof(*list->reloc_bos), 8,
550 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
551
552 if (list->relocs == NULL) {
553 anv_device_free(device, list->relocs);
554 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
555 }
556
557 return VK_SUCCESS;
558}
559
Jason Ekstrand59def432015-05-27 11:41:28 -0700560void
Jason Ekstrand403266b2015-05-25 17:38:15 -0700561anv_reloc_list_finish(struct anv_reloc_list *list, struct anv_device *device)
562{
563 anv_device_free(device, list->relocs);
564 anv_device_free(device, list->reloc_bos);
565}
566
567static VkResult
568anv_reloc_list_grow(struct anv_reloc_list *list, struct anv_device *device,
569 size_t num_additional_relocs)
570{
571 if (list->num_relocs + num_additional_relocs <= list->array_length)
572 return VK_SUCCESS;
573
574 size_t new_length = list->array_length * 2;
575 while (new_length < list->num_relocs + num_additional_relocs)
576 new_length *= 2;
577
578 struct drm_i915_gem_relocation_entry *new_relocs =
579 anv_device_alloc(device, new_length * sizeof(*list->relocs), 8,
580 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
581 if (new_relocs == NULL)
582 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
583
584 struct anv_bo **new_reloc_bos =
585 anv_device_alloc(device, new_length * sizeof(*list->reloc_bos), 8,
586 VK_SYSTEM_ALLOC_TYPE_INTERNAL);
587 if (new_relocs == NULL) {
588 anv_device_free(device, new_relocs);
589 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
590 }
591
592 memcpy(new_relocs, list->relocs, list->num_relocs * sizeof(*list->relocs));
593 memcpy(new_reloc_bos, list->reloc_bos,
594 list->num_relocs * sizeof(*list->reloc_bos));
595
596 anv_device_free(device, list->relocs);
597 anv_device_free(device, list->reloc_bos);
598
599 list->relocs = new_relocs;
600 list->reloc_bos = new_reloc_bos;
601
602 return VK_SUCCESS;
603}
604
/* Allocate a new batch buffer object backed by the device's batch BO
 * pool.
 *
 * On success *bbo_out owns both the container and the pooled BO; on
 * failure everything is released and an error is returned.
 */
static VkResult
anv_batch_bo_create(struct anv_device *device, struct anv_batch_bo **bbo_out)
{
   VkResult result;

   struct anv_batch_bo *bbo =
      anv_device_alloc(device, sizeof(*bbo), 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
   if (bbo == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   bbo->num_relocs = 0;
   bbo->prev_batch_bo = NULL;

   result = anv_bo_pool_alloc(&device->batch_bo_pool, &bbo->bo);
   if (result != VK_SUCCESS) {
      anv_device_free(device, bbo);
      return result;
   }

   *bbo_out = bbo;

   return VK_SUCCESS;
}
628
/* Point `batch` at the start of `bbo`'s mapping, reserving
 * batch_padding bytes at the end, and remember where this BO's
 * relocations begin in the batch's reloc list. */
static void
anv_batch_bo_start(struct anv_batch_bo *bbo, struct anv_batch *batch,
                   size_t batch_padding)
{
   batch->next = batch->start = bbo->bo.map;
   batch->end = bbo->bo.map + bbo->bo.size - batch_padding;
   bbo->first_reloc = batch->relocs.num_relocs;
}
637
/* Record how many bytes and relocations were emitted into `bbo` since
 * the matching anv_batch_bo_start. */
static void
anv_batch_bo_finish(struct anv_batch_bo *bbo, struct anv_batch *batch)
{
   assert(batch->start == bbo->bo.map);
   bbo->length = batch->next - batch->start;
   /* Under valgrind, verify everything we are about to submit was
    * actually written. */
   VG(VALGRIND_CHECK_MEM_IS_DEFINED(batch->start, bbo->length));
   bbo->num_relocs = batch->relocs.num_relocs - bbo->first_reloc;
}
646
/* Return the pooled BO and free the container.  The pool free must come
 * first since it reads bbo->bo. */
static void
anv_batch_bo_destroy(struct anv_batch_bo *bbo, struct anv_device *device)
{
   anv_bo_pool_free(&device->batch_bo_pool, &bbo->bo);
   anv_device_free(device, bbo);
}
653
/* Reserve space for num_dwords 32-bit dwords in the batch and return a
 * pointer for the caller to write them.
 *
 * When the current buffer is too full, extend_cb rolls over to a fresh
 * one.  NOTE(review): extend_cb's result is ignored and the callback is
 * assumed non-NULL — confirm all batches set it.
 */
void *
anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords)
{
   if (batch->next + num_dwords * 4 > batch->end)
      batch->extend_cb(batch, batch->user_data);

   void *p = batch->next;

   batch->next += num_dwords * 4;
   assert(batch->next <= batch->end);

   return p;
}
667
/* Append all of `other`'s relocations to `list`, rebasing each
 * relocation's batch offset by `offset` (the position at which the other
 * batch's contents were copied in). */
static void
anv_reloc_list_append(struct anv_reloc_list *list, struct anv_device *device,
                      struct anv_reloc_list *other, uint32_t offset)
{
   anv_reloc_list_grow(list, device, other->num_relocs);
   /* TODO: Handle failure */

   memcpy(&list->relocs[list->num_relocs], &other->relocs[0],
          other->num_relocs * sizeof(other->relocs[0]));
   memcpy(&list->reloc_bos[list->num_relocs], &other->reloc_bos[0],
          other->num_relocs * sizeof(other->reloc_bos[0]));

   /* Rebase only the entries just copied in. */
   for (uint32_t i = 0; i < other->num_relocs; i++)
      list->relocs[i + list->num_relocs].offset += offset;

   list->num_relocs += other->num_relocs;
}
685
/* Record a single relocation at batch offset `offset` pointing at
 * target_bo + delta.
 *
 * Returns the presumed GPU address (target_bo->offset + delta) that the
 * caller writes into the batch; the kernel patches it if the BO moves.
 */
static uint64_t
anv_reloc_list_add(struct anv_reloc_list *list, struct anv_device *device,
                   uint32_t offset, struct anv_bo *target_bo, uint32_t delta)
{
   struct drm_i915_gem_relocation_entry *entry;
   int index;

   anv_reloc_list_grow(list, device, 1);
   /* TODO: Handle failure */

   /* XXX: Can we use I915_EXEC_HANDLE_LUT? */
   index = list->num_relocs++;
   list->reloc_bos[index] = target_bo;
   entry = &list->relocs[index];
   entry->target_handle = target_bo->gem_handle;
   entry->delta = delta;
   entry->offset = offset;
   entry->presumed_offset = target_bo->offset;
   entry->read_domains = 0;
   entry->write_domain = 0;

   return target_bo->offset + delta;
}
709
/* Copy the contents of `other` (a secondary batch) into `batch`,
 * carrying its relocations over rebased to the copy position.
 *
 * NOTE(review): like anv_batch_emit_dwords, the extend_cb result is
 * ignored; the assert guards overflow after a single extension only.
 */
void
anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other)
{
   uint32_t size, offset;

   size = other->next - other->start;
   assert(size % 4 == 0);

   if (batch->next + size > batch->end)
      batch->extend_cb(batch, batch->user_data);

   assert(batch->next + size <= batch->end);

   memcpy(batch->next, other->start, size);

   /* Relocation offsets in `other` are relative to its start; rebase
    * them to where the bytes landed in this batch. */
   offset = batch->next - batch->start;
   anv_reloc_list_append(&batch->relocs, batch->device,
                         &other->relocs, offset);

   batch->next += size;
}
731
732uint64_t
733anv_batch_emit_reloc(struct anv_batch *batch,
734 void *location, struct anv_bo *bo, uint32_t delta)
735{
Jason Ekstrandda8f1482015-05-27 11:42:55 -0700736 return anv_reloc_list_add(&batch->relocs, batch->device,
737 location - batch->start, bo, delta);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700738}
739
/* Submit command buffers to the hardware (or fake completion in no-hw
 * mode).
 *
 * Each command buffer is execbuffer'd in order; the optional fence batch
 * is submitted after each one.  After a successful submit the BOs'
 * presumed offsets are updated from the kernel's results.  In no-hw mode
 * the queue's completed serial is bumped directly instead.
 */
VkResult anv_QueueSubmit(
    VkQueue                                     _queue,
    uint32_t                                    cmdBufferCount,
    const VkCmdBuffer*                          pCmdBuffers,
    VkFence                                     _fence)
{
   struct anv_queue *queue = (struct anv_queue *) _queue;
   struct anv_device *device = queue->device;
   struct anv_fence *fence = (struct anv_fence *) _fence;
   int ret;

   for (uint32_t i = 0; i < cmdBufferCount; i++) {
      struct anv_cmd_buffer *cmd_buffer =
         (struct anv_cmd_buffer *) pCmdBuffers[i];

      if (device->dump_aub)
         anv_cmd_buffer_dump(cmd_buffer);

      if (!device->no_hw) {
         ret = anv_gem_execbuffer(device, &cmd_buffer->execbuf);
         if (ret != 0)
            return vk_error(VK_ERROR_UNKNOWN);

         if (fence) {
            ret = anv_gem_execbuffer(device, &fence->execbuf);
            if (ret != 0)
               return vk_error(VK_ERROR_UNKNOWN);
         }

         /* NOTE(review): this inner `i` shadows the outer loop index. */
         for (uint32_t i = 0; i < cmd_buffer->bo_count; i++)
            cmd_buffer->exec2_bos[i]->offset = cmd_buffer->exec2_objects[i].offset;
      } else {
         *(uint32_t *)queue->completed_serial.map = cmd_buffer->serial;
      }
   }

   return VK_SUCCESS;
}
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700779VkResult anv_QueueWaitIdle(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700780 VkQueue _queue)
781{
782 struct anv_queue *queue = (struct anv_queue *) _queue;
783
784 return vkDeviceWaitIdle((VkDevice) queue->device);
785}
786
/* Wait for the GPU to drain by submitting a trivial batch (just
 * MI_BATCH_BUFFER_END + MI_NOOP) and blocking on its BO.
 *
 * The in-order render ring guarantees the wait completes only after all
 * previously submitted work.  In no-hw mode nothing is submitted.
 * NOTE(review): assumes submission ordering on a single ring — confirm.
 */
VkResult anv_DeviceWaitIdle(
    VkDevice                                    _device)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_state state;
   struct anv_batch batch;
   struct drm_i915_gem_execbuffer2 execbuf;
   struct drm_i915_gem_exec_object2 exec2_objects[1];
   struct anv_bo *bo = NULL;
   VkResult result;
   int64_t timeout;
   int ret;

   /* Build the 2-instruction batch in 32 bytes of dynamic state. */
   state = anv_state_pool_alloc(&device->dynamic_state_pool, 32, 32);
   bo = &device->dynamic_state_pool.block_pool->bo;
   batch.start = batch.next = state.map;
   batch.end = state.map + 32;
   anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
   anv_batch_emit(&batch, GEN8_MI_NOOP);

   exec2_objects[0].handle = bo->gem_handle;
   exec2_objects[0].relocation_count = 0;
   exec2_objects[0].relocs_ptr = 0;
   exec2_objects[0].alignment = 0;
   exec2_objects[0].offset = bo->offset;
   exec2_objects[0].flags = 0;
   exec2_objects[0].rsvd1 = 0;
   exec2_objects[0].rsvd2 = 0;

   execbuf.buffers_ptr = (uintptr_t) exec2_objects;
   execbuf.buffer_count = 1;
   execbuf.batch_start_offset = state.offset;
   execbuf.batch_len = batch.next - state.map;
   execbuf.cliprects_ptr = 0;
   execbuf.num_cliprects = 0;
   execbuf.DR1 = 0;
   execbuf.DR4 = 0;

   execbuf.flags =
      I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
   execbuf.rsvd1 = device->context_id;
   execbuf.rsvd2 = 0;

   if (!device->no_hw) {
      ret = anv_gem_execbuffer(device, &execbuf);
      if (ret != 0) {
         result = vk_error(VK_ERROR_UNKNOWN);
         goto fail;
      }

      /* Block indefinitely until the batch BO is idle. */
      timeout = INT64_MAX;
      ret = anv_gem_wait(device, bo->gem_handle, &timeout);
      if (ret != 0) {
         result = vk_error(VK_ERROR_UNKNOWN);
         goto fail;
      }
   }

   anv_state_pool_free(&device->dynamic_state_pool, state);

   return VK_SUCCESS;

 fail:
   anv_state_pool_free(&device->dynamic_state_pool, state);

   return result;
}
854
855void *
856anv_device_alloc(struct anv_device * device,
857 size_t size,
858 size_t alignment,
859 VkSystemAllocType allocType)
860{
861 return device->instance->pfnAlloc(device->instance->pAllocUserData,
862 size,
863 alignment,
864 allocType);
865}
866
867void
868anv_device_free(struct anv_device * device,
869 void * mem)
870{
871 return device->instance->pfnFree(device->instance->pAllocUserData,
872 mem);
873}
874
875VkResult
876anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
877{
878 bo->gem_handle = anv_gem_create(device, size);
879 if (!bo->gem_handle)
880 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
881
882 bo->map = NULL;
883 bo->index = 0;
884 bo->offset = 0;
885 bo->size = size;
886
887 return VK_SUCCESS;
888}
889
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700890VkResult anv_AllocMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700891 VkDevice _device,
892 const VkMemoryAllocInfo* pAllocInfo,
893 VkDeviceMemory* pMem)
894{
895 struct anv_device *device = (struct anv_device *) _device;
896 struct anv_device_memory *mem;
897 VkResult result;
898
899 assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);
900
901 mem = anv_device_alloc(device, sizeof(*mem), 8,
902 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
903 if (mem == NULL)
904 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
905
906 result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
907 if (result != VK_SUCCESS)
908 goto fail;
909
910 *pMem = (VkDeviceMemory) mem;
911
912 return VK_SUCCESS;
913
914 fail:
915 anv_device_free(device, mem);
916
917 return result;
918}
919
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700920VkResult anv_FreeMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700921 VkDevice _device,
922 VkDeviceMemory _mem)
923{
924 struct anv_device *device = (struct anv_device *) _device;
925 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
926
927 if (mem->bo.map)
928 anv_gem_munmap(mem->bo.map, mem->bo.size);
929
930 if (mem->bo.gem_handle != 0)
931 anv_gem_close(device, mem->bo.gem_handle);
932
933 anv_device_free(device, mem);
934
935 return VK_SUCCESS;
936}
937
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700938VkResult anv_MapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700939 VkDevice _device,
940 VkDeviceMemory _mem,
941 VkDeviceSize offset,
942 VkDeviceSize size,
943 VkMemoryMapFlags flags,
944 void** ppData)
945{
946 struct anv_device *device = (struct anv_device *) _device;
947 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
948
949 /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
950 * takes a VkDeviceMemory pointer, it seems like only one map of the memory
951 * at a time is valid. We could just mmap up front and return an offset
952 * pointer here, but that may exhaust virtual memory on 32 bit
953 * userspace. */
954
955 mem->map = anv_gem_mmap(device, mem->bo.gem_handle, offset, size);
956 mem->map_size = size;
957
958 *ppData = mem->map;
959
960 return VK_SUCCESS;
961}
962
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700963VkResult anv_UnmapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700964 VkDevice _device,
965 VkDeviceMemory _mem)
966{
967 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
968
969 anv_gem_munmap(mem->map, mem->map_size);
970
971 return VK_SUCCESS;
972}
973
Jason Ekstrandd9c2cae2015-07-07 17:22:29 -0700974VkResult anv_FlushMappedMemoryRanges(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700975 VkDevice device,
Jason Ekstrandd9c2cae2015-07-07 17:22:29 -0700976 uint32_t memRangeCount,
977 const VkMappedMemoryRange* pMemRanges)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700978{
979 /* clflush here for !llc platforms */
980
981 return VK_SUCCESS;
982}
983
Jason Ekstrandd9c2cae2015-07-07 17:22:29 -0700984VkResult anv_InvalidateMappedMemoryRanges(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700985 VkDevice device,
Jason Ekstrandd9c2cae2015-07-07 17:22:29 -0700986 uint32_t memRangeCount,
987 const VkMappedMemoryRange* pMemRanges)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700988{
Jason Ekstrandd9c2cae2015-07-07 17:22:29 -0700989 return anv_FlushMappedMemoryRanges(device, memRangeCount, pMemRanges);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700990}
991
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700992VkResult anv_DestroyObject(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700993 VkDevice _device,
994 VkObjectType objType,
Jason Ekstrand57153da2015-05-22 15:15:08 -0700995 VkObject _object)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700996{
997 struct anv_device *device = (struct anv_device *) _device;
Jason Ekstrand57153da2015-05-22 15:15:08 -0700998 struct anv_object *object = (struct anv_object *) _object;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700999
Jason Ekstrand57153da2015-05-22 15:15:08 -07001000 switch (objType) {
1001 case VK_OBJECT_TYPE_INSTANCE:
1002 return anv_DestroyInstance((VkInstance) _object);
1003
1004 case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
1005 /* We don't want to actually destroy physical devices */
1006 return VK_SUCCESS;
1007
1008 case VK_OBJECT_TYPE_DEVICE:
1009 assert(_device == (VkDevice) _object);
1010 return anv_DestroyDevice((VkDevice) _object);
1011
1012 case VK_OBJECT_TYPE_QUEUE:
1013 /* TODO */
1014 return VK_SUCCESS;
1015
1016 case VK_OBJECT_TYPE_DEVICE_MEMORY:
1017 return anv_FreeMemory(_device, (VkDeviceMemory) _object);
1018
1019 case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
Jason Ekstrand5a4ebf62015-07-08 17:29:49 -07001020 case VK_OBJECT_TYPE_PIPELINE_CACHE:
Jason Ekstrand57153da2015-05-22 15:15:08 -07001021 /* These are just dummys anyway, so we don't need to destroy them */
1022 return VK_SUCCESS;
1023
1024 case VK_OBJECT_TYPE_BUFFER:
Jason Ekstrand57153da2015-05-22 15:15:08 -07001025 case VK_OBJECT_TYPE_IMAGE:
Jason Ekstrand57153da2015-05-22 15:15:08 -07001026 case VK_OBJECT_TYPE_DEPTH_STENCIL_VIEW:
1027 case VK_OBJECT_TYPE_SHADER:
Jason Ekstrand5a4ebf62015-07-08 17:29:49 -07001028 case VK_OBJECT_TYPE_SHADER_MODULE:
Jason Ekstrand57153da2015-05-22 15:15:08 -07001029 case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
1030 case VK_OBJECT_TYPE_SAMPLER:
1031 case VK_OBJECT_TYPE_DESCRIPTOR_SET:
1032 case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
1033 case VK_OBJECT_TYPE_DYNAMIC_RS_STATE:
1034 case VK_OBJECT_TYPE_DYNAMIC_CB_STATE:
1035 case VK_OBJECT_TYPE_DYNAMIC_DS_STATE:
1036 case VK_OBJECT_TYPE_RENDER_PASS:
1037 /* These are trivially destroyable */
1038 anv_device_free(device, (void *) _object);
1039 return VK_SUCCESS;
1040
1041 case VK_OBJECT_TYPE_COMMAND_BUFFER:
1042 case VK_OBJECT_TYPE_PIPELINE:
1043 case VK_OBJECT_TYPE_DYNAMIC_VP_STATE:
1044 case VK_OBJECT_TYPE_FENCE:
1045 case VK_OBJECT_TYPE_QUERY_POOL:
1046 case VK_OBJECT_TYPE_FRAMEBUFFER:
Jason Ekstrand9d6f55d2015-06-09 11:08:03 -07001047 case VK_OBJECT_TYPE_BUFFER_VIEW:
1048 case VK_OBJECT_TYPE_IMAGE_VIEW:
1049 case VK_OBJECT_TYPE_COLOR_ATTACHMENT_VIEW:
Jason Ekstrand57153da2015-05-22 15:15:08 -07001050 (object->destructor)(device, object, objType);
1051 return VK_SUCCESS;
1052
1053 case VK_OBJECT_TYPE_SEMAPHORE:
1054 case VK_OBJECT_TYPE_EVENT:
1055 stub_return(VK_UNSUPPORTED);
1056
1057 default:
1058 unreachable("Invalid object type");
1059 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001060}
1061
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001062VkResult anv_GetObjectMemoryRequirements(
1063 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001064 VkObjectType objType,
1065 VkObject object,
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001066 VkMemoryRequirements* pMemoryRequirements)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001067{
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001068 pMemoryRequirements->memPropsAllowed =
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001069 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
Jason Ekstrand68fa7502015-07-06 17:32:28 -07001070 /* VK_MEMORY_PROPERTY_HOST_NON_COHERENT_BIT | */
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001071 /* VK_MEMORY_PROPERTY_HOST_UNCACHED_BIT | */
Jason Ekstrand65f9ccb2015-07-06 17:33:43 -07001072 VK_MEMORY_PROPERTY_HOST_WRITE_COMBINED_BIT;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001073
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001074 pMemoryRequirements->memPropsRequired = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001075
1076 switch (objType) {
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001077 case VK_OBJECT_TYPE_BUFFER: {
1078 struct anv_buffer *buffer = (struct anv_buffer *) object;
1079 pMemoryRequirements->size = buffer->size;
1080 pMemoryRequirements->alignment = 16;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001081 break;
1082 }
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001083 case VK_OBJECT_TYPE_IMAGE: {
1084 struct anv_image *image = (struct anv_image *) object;
1085 pMemoryRequirements->size = image->size;
1086 pMemoryRequirements->alignment = image->alignment;
1087 break;
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001088 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001089 default:
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001090 pMemoryRequirements->size = 0;
1091 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001092 }
1093
Jason Ekstrandef8980e2015-07-07 18:16:42 -07001094 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001095}
1096
Jason Ekstrandbb6567f2015-07-08 09:04:16 -07001097VkResult anv_BindObjectMemory(
1098 VkDevice device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001099 VkObjectType objType,
1100 VkObject object,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001101 VkDeviceMemory _mem,
1102 VkDeviceSize memOffset)
1103{
1104 struct anv_buffer *buffer;
1105 struct anv_image *image;
1106 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
1107
1108 switch (objType) {
1109 case VK_OBJECT_TYPE_BUFFER:
1110 buffer = (struct anv_buffer *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001111 buffer->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001112 buffer->offset = memOffset;
1113 break;
1114 case VK_OBJECT_TYPE_IMAGE:
1115 image = (struct anv_image *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001116 image->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001117 image->offset = memOffset;
1118 break;
1119 default:
1120 break;
1121 }
Jason Ekstrandbb6567f2015-07-08 09:04:16 -07001122
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001123 return VK_SUCCESS;
1124}
1125
Jason Ekstrand3c65a1a2015-07-08 09:16:48 -07001126VkResult anv_QueueBindSparseBufferMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001127 VkQueue queue,
Jason Ekstrand3c65a1a2015-07-08 09:16:48 -07001128 VkBuffer buffer,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001129 VkDeviceSize rangeOffset,
1130 VkDeviceSize rangeSize,
1131 VkDeviceMemory mem,
1132 VkDeviceSize memOffset)
1133{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001134 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001135}
1136
Jason Ekstrand3c65a1a2015-07-08 09:16:48 -07001137VkResult anv_QueueBindSparseImageMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001138 VkQueue queue,
1139 VkImage image,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001140 const VkImageMemoryBindInfo* pBindInfo,
1141 VkDeviceMemory mem,
1142 VkDeviceSize memOffset)
1143{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001144 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001145}
1146
Jason Ekstrand57153da2015-05-22 15:15:08 -07001147static void
1148anv_fence_destroy(struct anv_device *device,
1149 struct anv_object *object,
1150 VkObjectType obj_type)
1151{
1152 struct anv_fence *fence = (struct anv_fence *) object;
1153
1154 assert(obj_type == VK_OBJECT_TYPE_FENCE);
1155
1156 anv_gem_munmap(fence->bo.map, fence->bo.size);
1157 anv_gem_close(device, fence->bo.gem_handle);
1158 anv_device_free(device, fence);
1159}
1160
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001161VkResult anv_CreateFence(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001162 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001163 const VkFenceCreateInfo* pCreateInfo,
1164 VkFence* pFence)
1165{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001166 struct anv_device *device = (struct anv_device *) _device;
1167 struct anv_fence *fence;
1168 struct anv_batch batch;
1169 VkResult result;
1170
1171 const uint32_t fence_size = 128;
1172
1173 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);
1174
1175 fence = anv_device_alloc(device, sizeof(*fence), 8,
1176 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1177 if (fence == NULL)
1178 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1179
1180 result = anv_bo_init_new(&fence->bo, device, fence_size);
1181 if (result != VK_SUCCESS)
1182 goto fail;
1183
Jason Ekstrand57153da2015-05-22 15:15:08 -07001184 fence->base.destructor = anv_fence_destroy;
1185
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001186 fence->bo.map =
1187 anv_gem_mmap(device, fence->bo.gem_handle, 0, fence->bo.size);
Jason Ekstrandda8f1482015-05-27 11:42:55 -07001188 batch.next = batch.start = fence->bo.map;
1189 batch.end = fence->bo.map + fence->bo.size;
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001190 anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
1191 anv_batch_emit(&batch, GEN8_MI_NOOP);
1192
1193 fence->exec2_objects[0].handle = fence->bo.gem_handle;
1194 fence->exec2_objects[0].relocation_count = 0;
1195 fence->exec2_objects[0].relocs_ptr = 0;
1196 fence->exec2_objects[0].alignment = 0;
1197 fence->exec2_objects[0].offset = fence->bo.offset;
1198 fence->exec2_objects[0].flags = 0;
1199 fence->exec2_objects[0].rsvd1 = 0;
1200 fence->exec2_objects[0].rsvd2 = 0;
1201
1202 fence->execbuf.buffers_ptr = (uintptr_t) fence->exec2_objects;
1203 fence->execbuf.buffer_count = 1;
1204 fence->execbuf.batch_start_offset = 0;
1205 fence->execbuf.batch_len = batch.next - fence->bo.map;
1206 fence->execbuf.cliprects_ptr = 0;
1207 fence->execbuf.num_cliprects = 0;
1208 fence->execbuf.DR1 = 0;
1209 fence->execbuf.DR4 = 0;
1210
1211 fence->execbuf.flags =
1212 I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
1213 fence->execbuf.rsvd1 = device->context_id;
1214 fence->execbuf.rsvd2 = 0;
1215
Chad Versace87d98e12015-06-04 14:31:53 -07001216 *pFence = (VkFence) fence;
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001217
1218 return VK_SUCCESS;
1219
1220 fail:
1221 anv_device_free(device, fence);
1222
1223 return result;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001224}
1225
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001226VkResult anv_ResetFences(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001227 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001228 uint32_t fenceCount,
Jason Ekstrandd5349b12015-07-07 17:18:00 -07001229 const VkFence* pFences)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001230{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001231 struct anv_fence **fences = (struct anv_fence **) pFences;
1232
Kristian Høgsberg Kristensen52637c02015-06-05 11:51:30 -07001233 for (uint32_t i = 0; i < fenceCount; i++)
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001234 fences[i]->ready = false;
1235
1236 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001237}
1238
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001239VkResult anv_GetFenceStatus(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001240 VkDevice _device,
1241 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001242{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001243 struct anv_device *device = (struct anv_device *) _device;
1244 struct anv_fence *fence = (struct anv_fence *) _fence;
1245 int64_t t = 0;
1246 int ret;
1247
1248 if (fence->ready)
1249 return VK_SUCCESS;
1250
1251 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
1252 if (ret == 0) {
1253 fence->ready = true;
1254 return VK_SUCCESS;
1255 }
1256
1257 return VK_NOT_READY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001258}
1259
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001260VkResult anv_WaitForFences(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001261 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001262 uint32_t fenceCount,
1263 const VkFence* pFences,
1264 bool32_t waitAll,
1265 uint64_t timeout)
1266{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001267 ANV_FROM_HANDLE(anv_device, device, _device);
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001268 int64_t t = timeout;
1269 int ret;
1270
1271 /* FIXME: handle !waitAll */
1272
1273 for (uint32_t i = 0; i < fenceCount; i++) {
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001274 ANV_FROM_HANDLE(anv_fence, fence, pFences[i]);
1275 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001276 if (ret == -1 && errno == ETIME)
1277 return VK_TIMEOUT;
1278 else if (ret == -1)
1279 return vk_error(VK_ERROR_UNKNOWN);
1280 }
1281
1282 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001283}
1284
1285// Queue semaphore functions
1286
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001287VkResult anv_CreateSemaphore(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001288 VkDevice device,
1289 const VkSemaphoreCreateInfo* pCreateInfo,
1290 VkSemaphore* pSemaphore)
1291{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001292 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001293}
1294
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001295VkResult anv_QueueSignalSemaphore(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001296 VkQueue queue,
1297 VkSemaphore semaphore)
1298{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001299 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001300}
1301
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001302VkResult anv_QueueWaitSemaphore(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001303 VkQueue queue,
1304 VkSemaphore semaphore)
1305{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001306 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001307}
1308
1309// Event functions
1310
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001311VkResult anv_CreateEvent(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001312 VkDevice device,
1313 const VkEventCreateInfo* pCreateInfo,
1314 VkEvent* pEvent)
1315{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001316 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001317}
1318
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001319VkResult anv_GetEventStatus(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001320 VkDevice device,
1321 VkEvent event)
1322{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001323 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001324}
1325
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001326VkResult anv_SetEvent(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001327 VkDevice device,
1328 VkEvent event)
1329{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001330 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001331}
1332
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001333VkResult anv_ResetEvent(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001334 VkDevice device,
1335 VkEvent event)
1336{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07001337 stub_return(VK_UNSUPPORTED);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001338}
1339
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001340// Buffer functions
1341
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001342VkResult anv_CreateBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001343 VkDevice _device,
1344 const VkBufferCreateInfo* pCreateInfo,
1345 VkBuffer* pBuffer)
1346{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001347 ANV_FROM_HANDLE(anv_device, device, _device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001348 struct anv_buffer *buffer;
1349
1350 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
1351
1352 buffer = anv_device_alloc(device, sizeof(*buffer), 8,
1353 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1354 if (buffer == NULL)
1355 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1356
1357 buffer->size = pCreateInfo->size;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001358 buffer->bo = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001359 buffer->offset = 0;
1360
1361 *pBuffer = (VkBuffer) buffer;
1362
1363 return VK_SUCCESS;
1364}
1365
1366// Buffer view functions
1367
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001368static void
1369fill_buffer_surface_state(void *state, VkFormat format,
1370 uint32_t offset, uint32_t range)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001371{
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001372 const struct anv_format *info;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001373
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001374 info = anv_format_for_vk_format(format);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001375 /* This assumes RGBA float format. */
1376 uint32_t stride = 4;
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001377 uint32_t num_elements = range / stride;
1378
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001379 struct GEN8_RENDER_SURFACE_STATE surface_state = {
1380 .SurfaceType = SURFTYPE_BUFFER,
1381 .SurfaceArray = false,
Chad Versace4c814632015-06-25 18:18:06 -07001382 .SurfaceFormat = info->surface_format,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001383 .SurfaceVerticalAlignment = VALIGN4,
1384 .SurfaceHorizontalAlignment = HALIGN4,
1385 .TileMode = LINEAR,
1386 .VerticalLineStride = 0,
1387 .VerticalLineStrideOffset = 0,
1388 .SamplerL2BypassModeDisable = true,
1389 .RenderCacheReadWriteMode = WriteOnlyCache,
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07001390 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg Kristensena5b49d22015-06-10 23:11:37 -07001391 .BaseMipLevel = 0.0,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001392 .SurfaceQPitch = 0,
1393 .Height = (num_elements >> 7) & 0x3fff,
1394 .Width = num_elements & 0x7f,
1395 .Depth = (num_elements >> 21) & 0x3f,
1396 .SurfacePitch = stride - 1,
1397 .MinimumArrayElement = 0,
1398 .NumberofMultisamples = MULTISAMPLECOUNT_1,
1399 .XOffset = 0,
1400 .YOffset = 0,
1401 .SurfaceMinLOD = 0,
1402 .MIPCountLOD = 0,
1403 .AuxiliarySurfaceMode = AUX_NONE,
1404 .RedClearColor = 0,
1405 .GreenClearColor = 0,
1406 .BlueClearColor = 0,
1407 .AlphaClearColor = 0,
1408 .ShaderChannelSelectRed = SCS_RED,
1409 .ShaderChannelSelectGreen = SCS_GREEN,
1410 .ShaderChannelSelectBlue = SCS_BLUE,
1411 .ShaderChannelSelectAlpha = SCS_ALPHA,
Kristian Høgsberg Kristensena5b49d22015-06-10 23:11:37 -07001412 .ResourceMinLOD = 0.0,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001413 /* FIXME: We assume that the image must be bound at this time. */
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001414 .SurfaceBaseAddress = { NULL, offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001415 };
1416
Kristian Høgsberg Kristensenfad418f2015-05-27 14:05:50 -07001417 GEN8_RENDER_SURFACE_STATE_pack(NULL, state, &surface_state);
1418}
1419
/* Create a buffer view: allocates a surface-view wrapper plus a 64-byte
 * surface-state entry in the surface state pool, and packs a
 * RENDER_SURFACE_STATE for the viewed range of the buffer.
 */
VkResult anv_CreateBufferView(
    VkDevice                                    _device,
    const VkBufferViewCreateInfo*               pCreateInfo,
    VkBufferView*                               pView)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_buffer, buffer, pCreateInfo->buffer);
   struct anv_surface_view *view;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);

   view = anv_device_alloc(device, sizeof(*view), 8,
                           VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (view == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* Shared destructor for surface views, used by anv_DestroyObject. */
   view->base.destructor = anv_surface_view_destroy;

   /* The view's offset is absolute within the BO: buffer binding offset
    * plus the view's own offset. */
   view->bo = buffer->bo;
   view->offset = buffer->offset + pCreateInfo->offset;
   view->surface_state =
      anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
   view->format = pCreateInfo->format;
   view->range = pCreateInfo->range;

   fill_buffer_surface_state(view->surface_state.map,
                             pCreateInfo->format, view->offset, pCreateInfo->range);

   *pView = (VkBufferView) view;

   return VK_SUCCESS;
}
1452
1453// Sampler functions
1454
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001455VkResult anv_CreateSampler(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001456 VkDevice _device,
1457 const VkSamplerCreateInfo* pCreateInfo,
1458 VkSampler* pSampler)
1459{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001460 ANV_FROM_HANDLE(anv_device, device, _device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001461 struct anv_sampler *sampler;
Kristian Høgsberg Kristensen76bb6582015-05-31 22:15:34 -07001462 uint32_t mag_filter, min_filter, max_anisotropy;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001463
Kristian Høgsberg18acfa72015-05-13 13:53:01 -07001464 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001465
1466 sampler = anv_device_alloc(device, sizeof(*sampler), 8,
1467 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1468 if (!sampler)
1469 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1470
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001471 static const uint32_t vk_to_gen_tex_filter[] = {
Kristian Høgsberg Kristensen5caa4082015-05-31 22:35:11 -07001472 [VK_TEX_FILTER_NEAREST] = MAPFILTER_NEAREST,
1473 [VK_TEX_FILTER_LINEAR] = MAPFILTER_LINEAR
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001474 };
1475
1476 static const uint32_t vk_to_gen_mipmap_mode[] = {
Kristian Høgsberg Kristensen5caa4082015-05-31 22:35:11 -07001477 [VK_TEX_MIPMAP_MODE_BASE] = MIPFILTER_NONE,
1478 [VK_TEX_MIPMAP_MODE_NEAREST] = MIPFILTER_NEAREST,
1479 [VK_TEX_MIPMAP_MODE_LINEAR] = MIPFILTER_LINEAR
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001480 };
1481
1482 static const uint32_t vk_to_gen_tex_address[] = {
Kristian Høgsberg Kristensen5caa4082015-05-31 22:35:11 -07001483 [VK_TEX_ADDRESS_WRAP] = TCM_WRAP,
1484 [VK_TEX_ADDRESS_MIRROR] = TCM_MIRROR,
1485 [VK_TEX_ADDRESS_CLAMP] = TCM_CLAMP,
1486 [VK_TEX_ADDRESS_MIRROR_ONCE] = TCM_MIRROR_ONCE,
1487 [VK_TEX_ADDRESS_CLAMP_BORDER] = TCM_CLAMP_BORDER,
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001488 };
1489
1490 static const uint32_t vk_to_gen_compare_op[] = {
Kristian Høgsberg Kristensen5caa4082015-05-31 22:35:11 -07001491 [VK_COMPARE_OP_NEVER] = PREFILTEROPNEVER,
1492 [VK_COMPARE_OP_LESS] = PREFILTEROPLESS,
1493 [VK_COMPARE_OP_EQUAL] = PREFILTEROPEQUAL,
1494 [VK_COMPARE_OP_LESS_EQUAL] = PREFILTEROPLEQUAL,
1495 [VK_COMPARE_OP_GREATER] = PREFILTEROPGREATER,
1496 [VK_COMPARE_OP_NOT_EQUAL] = PREFILTEROPNOTEQUAL,
1497 [VK_COMPARE_OP_GREATER_EQUAL] = PREFILTEROPGEQUAL,
1498 [VK_COMPARE_OP_ALWAYS] = PREFILTEROPALWAYS,
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001499 };
1500
Kristian Høgsberg Kristensen76bb6582015-05-31 22:15:34 -07001501 if (pCreateInfo->maxAnisotropy > 1) {
1502 mag_filter = MAPFILTER_ANISOTROPIC;
1503 min_filter = MAPFILTER_ANISOTROPIC;
1504 max_anisotropy = (pCreateInfo->maxAnisotropy - 2) / 2;
1505 } else {
1506 mag_filter = vk_to_gen_tex_filter[pCreateInfo->magFilter];
1507 min_filter = vk_to_gen_tex_filter[pCreateInfo->minFilter];
1508 max_anisotropy = RATIO21;
1509 }
1510
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001511 struct GEN8_SAMPLER_STATE sampler_state = {
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001512 .SamplerDisable = false,
1513 .TextureBorderColorMode = DX10OGL,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001514 .LODPreClampMode = 0,
Kristian Høgsberg Kristensena5b49d22015-06-10 23:11:37 -07001515 .BaseMipLevel = 0.0,
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001516 .MipModeFilter = vk_to_gen_mipmap_mode[pCreateInfo->mipMode],
Kristian Høgsberg Kristensen76bb6582015-05-31 22:15:34 -07001517 .MagModeFilter = mag_filter,
1518 .MinModeFilter = min_filter,
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001519 .TextureLODBias = pCreateInfo->mipLodBias * 256,
1520 .AnisotropicAlgorithm = EWAApproximation,
Kristian Høgsberg Kristensena5b49d22015-06-10 23:11:37 -07001521 .MinLOD = pCreateInfo->minLod,
1522 .MaxLOD = pCreateInfo->maxLod,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001523 .ChromaKeyEnable = 0,
1524 .ChromaKeyIndex = 0,
1525 .ChromaKeyMode = 0,
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001526 .ShadowFunction = vk_to_gen_compare_op[pCreateInfo->compareOp],
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001527 .CubeSurfaceControlMode = 0,
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -07001528
1529 .IndirectStatePointer =
Jason Ekstrand522ab832015-07-08 11:44:52 -07001530 device->border_colors.offset +
Kristian Høgsberg Kristensen76bb6582015-05-31 22:15:34 -07001531 pCreateInfo->borderColor * sizeof(float) * 4,
Kristian Høgsberg Kristensendc56e4f2015-05-29 16:06:06 -07001532
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001533 .LODClampMagnificationMode = MIPNONE,
Kristian Høgsberg Kristensen76bb6582015-05-31 22:15:34 -07001534 .MaximumAnisotropy = max_anisotropy,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001535 .RAddressMinFilterRoundingEnable = 0,
1536 .RAddressMagFilterRoundingEnable = 0,
1537 .VAddressMinFilterRoundingEnable = 0,
1538 .VAddressMagFilterRoundingEnable = 0,
1539 .UAddressMinFilterRoundingEnable = 0,
1540 .UAddressMagFilterRoundingEnable = 0,
1541 .TrilinearFilterQuality = 0,
1542 .NonnormalizedCoordinateEnable = 0,
Kristian Høgsberga3fd1362015-05-12 21:44:59 -07001543 .TCXAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressU],
1544 .TCYAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressV],
1545 .TCZAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressW],
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001546 };
1547
1548 GEN8_SAMPLER_STATE_pack(NULL, sampler->state, &sampler_state);
1549
1550 *pSampler = (VkSampler) sampler;
1551
1552 return VK_SUCCESS;
1553}
1554
1555// Descriptor set functions
1556
/* Builds an anv_descriptor_set_layout from the API create info.
 *
 * The layout is one allocation: the struct itself followed by a flat
 * entries[] array of anv_descriptor_slot.  For every shader stage we
 * carve two consecutive sub-ranges out of entries[] — the stage's
 * surface slots first, then its sampler slots — and record the start
 * pointer and count of each range in set_layout->stage[s].
 */
VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   /* Per-stage slot totals, indexed by shader stage bit position. */
   uint32_t sampler_count[VK_SHADER_STAGE_NUM] = { 0, };
   uint32_t surface_count[VK_SHADER_STAGE_NUM] = { 0, };
   uint32_t num_dynamic_buffers = 0;
   uint32_t count = 0;
   uint32_t stages = 0;
   uint32_t s;

   /* First pass: count how many sampler/surface/dynamic slots each
    * binding contributes, per stage.  A binding contributes arraySize
    * slots to every stage set in its stageFlags.
    */
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      /* Sampler slots: plain samplers and the sampler half of combined
       * image/samplers. */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            sampler_count[s] += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      /* Surface slots: every descriptor type that binds an image or
       * buffer view.  COMBINED_IMAGE_SAMPLER counts in both categories. */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            surface_count[s] += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      /* Dynamic uniform/storage buffers additionally consume dynamic
       * offset slots (not per-stage). */
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         num_dynamic_buffers += pCreateInfo->pBinding[i].arraySize;
         break;
      default:
         break;
      }

      stages |= pCreateInfo->pBinding[i].stageFlags;
      count += pCreateInfo->pBinding[i].arraySize;
   }

   /* Slot totals across all stages size the trailing entries[] array. */
   uint32_t sampler_total = 0;
   uint32_t surface_total = 0;
   for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
      sampler_total += sampler_count[s];
      surface_total += surface_count[s];
   }

   size_t size = sizeof(*set_layout) +
      (sampler_total + surface_total) * sizeof(set_layout->entries[0]);
   set_layout = anv_device_alloc(device, size, 8,
                                 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!set_layout)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->num_dynamic_buffers = num_dynamic_buffers;
   set_layout->count = count;
   set_layout->shader_stages = stages;

   /* Carve entries[] into per-stage ranges: surfaces first, then
    * samplers, for each stage in turn.  sampler[s]/surface[s] double as
    * write cursors for the fill pass below. */
   struct anv_descriptor_slot *p = set_layout->entries;
   struct anv_descriptor_slot *sampler[VK_SHADER_STAGE_NUM];
   struct anv_descriptor_slot *surface[VK_SHADER_STAGE_NUM];
   for (uint32_t s = 0; s < VK_SHADER_STAGE_NUM; s++) {
      set_layout->stage[s].surface_count = surface_count[s];
      set_layout->stage[s].surface_start = surface[s] = p;
      p += surface_count[s];
      set_layout->stage[s].sampler_count = sampler_count[s];
      set_layout->stage[s].sampler_start = sampler[s] = p;
      p += sampler_count[s];
   }

   /* Second pass: fill in the slots.  'descriptor' is the running index
    * of the first element of the current binding within the whole set;
    * 'dynamic_slot' is the analogous running index for dynamic buffers
    * only (-1 marks a non-dynamic slot). */
   uint32_t descriptor = 0;
   int8_t dynamic_slot = 0;
   bool is_dynamic;
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
               sampler[s]->index = descriptor + j;
               sampler[s]->dynamic_slot = -1;
               sampler[s]++;
            }
         break;
      default:
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         is_dynamic = true;
         break;
      default:
         is_dynamic = false;
         break;
      }

      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].arraySize; j++) {
               surface[s]->index = descriptor + j;
               if (is_dynamic)
                  surface[s]->dynamic_slot = dynamic_slot + j;
               else
                  surface[s]->dynamic_slot = -1;
               surface[s]++;
            }
         break;
      default:
         break;
      }

      if (is_dynamic)
         dynamic_slot += pCreateInfo->pBinding[i].arraySize;

      descriptor += pCreateInfo->pBinding[i].arraySize;
   }

   *pSetLayout = (VkDescriptorSetLayout) set_layout;

   return VK_SUCCESS;
}
1707
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001708VkResult anv_CreateDescriptorPool(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001709 VkDevice device,
1710 VkDescriptorPoolUsage poolUsage,
1711 uint32_t maxSets,
1712 const VkDescriptorPoolCreateInfo* pCreateInfo,
1713 VkDescriptorPool* pDescriptorPool)
1714{
Kristian Høgsberga9f21152015-05-17 18:38:34 -07001715 *pDescriptorPool = 1;
1716
1717 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001718}
1719
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001720VkResult anv_ResetDescriptorPool(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001721 VkDevice device,
1722 VkDescriptorPool descriptorPool)
1723{
Kristian Høgsberga9f21152015-05-17 18:38:34 -07001724 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001725}
1726
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001727VkResult anv_AllocDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001728 VkDevice _device,
1729 VkDescriptorPool descriptorPool,
1730 VkDescriptorSetUsage setUsage,
1731 uint32_t count,
1732 const VkDescriptorSetLayout* pSetLayouts,
1733 VkDescriptorSet* pDescriptorSets,
1734 uint32_t* pCount)
1735{
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001736 ANV_FROM_HANDLE(anv_device, device, _device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001737 struct anv_descriptor_set *set;
1738 size_t size;
1739
1740 for (uint32_t i = 0; i < count; i++) {
Jason Ekstrandc8577b52015-07-08 14:24:12 -07001741 ANV_FROM_HANDLE(anv_descriptor_set_layout, layout, pSetLayouts[i]);
Kristian Høgsberga77229c2015-05-13 11:49:30 -07001742 size = sizeof(*set) + layout->count * sizeof(set->descriptors[0]);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001743 set = anv_device_alloc(device, size, 8,
1744 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1745 if (!set) {
1746 *pCount = i;
1747 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1748 }
1749
Jason Ekstrand0a547512015-05-21 16:33:04 -07001750 /* Descriptor sets may not be 100% filled out so we need to memset to
1751 * ensure that we can properly detect and handle holes.
1752 */
1753 memset(set, 0, size);
1754
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001755 pDescriptorSets[i] = (VkDescriptorSet) set;
1756 }
1757
1758 *pCount = count;
1759
Kristian Høgsbergb4b3bd12015-05-17 18:39:12 -07001760 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001761}
1762
Jason Ekstrandd29ec8f2015-07-08 14:24:56 -07001763VkResult anv_UpdateDescriptorSets(
1764 VkDevice device,
1765 uint32_t writeCount,
1766 const VkWriteDescriptorSet* pDescriptorWrites,
1767 uint32_t copyCount,
1768 const VkCopyDescriptorSet* pDescriptorCopies)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001769{
Jason Ekstrandd29ec8f2015-07-08 14:24:56 -07001770 for (uint32_t i = 0; i < writeCount; i++) {
1771 const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
1772 ANV_FROM_HANDLE(anv_descriptor_set, set, write->destSet);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001773
Jason Ekstrandd29ec8f2015-07-08 14:24:56 -07001774 switch (write->descriptorType) {
1775 case VK_DESCRIPTOR_TYPE_SAMPLER:
1776 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1777 for (uint32_t j = 0; j < write->count; j++) {
1778 set->descriptors[write->destBinding + j].sampler =
1779 (struct anv_sampler *) write->pDescriptors[j].sampler;
1780 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001781
Jason Ekstrandd29ec8f2015-07-08 14:24:56 -07001782 if (write->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
1783 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001784
Jason Ekstrandd29ec8f2015-07-08 14:24:56 -07001785 /* fallthrough */
1786
1787 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1788 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1789 for (uint32_t j = 0; j < write->count; j++) {
1790 set->descriptors[write->destBinding + j].view =
1791 (struct anv_surface_view *) write->pDescriptors[j].imageView;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001792 }
1793 break;
1794
Jason Ekstrandd29ec8f2015-07-08 14:24:56 -07001795 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1796 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1797 anv_finishme("texel buffers not implemented");
1798 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001799
Jason Ekstrandd29ec8f2015-07-08 14:24:56 -07001800 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1801 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1802 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1803 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1804 for (uint32_t j = 0; j < write->count; j++) {
1805 set->descriptors[write->destBinding + j].view =
1806 (struct anv_surface_view *) write->pDescriptors[j].bufferView;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001807 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001808
1809 default:
1810 break;
1811 }
1812 }
Jason Ekstrandd29ec8f2015-07-08 14:24:56 -07001813
1814 for (uint32_t i = 0; i < copyCount; i++) {
1815 const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
1816 ANV_FROM_HANDLE(anv_descriptor_set, src, copy->destSet);
1817 ANV_FROM_HANDLE(anv_descriptor_set, dest, copy->destSet);
1818 for (uint32_t j = 0; j < copy->count; j++) {
1819 dest->descriptors[copy->destBinding + j] =
1820 src->descriptors[copy->srcBinding + j];
1821 }
1822 }
1823
1824 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001825}
1826
1827// State object functions
1828
/* Clamps x into the inclusive range [min, max].  Assumes min <= max. */
static inline int64_t
clamp_int64(int64_t x, int64_t min, int64_t max)
{
   if (x < min)
      return min;
   return x < max ? x : max;
}
1839
Jason Ekstrand57153da2015-05-22 15:15:08 -07001840static void
1841anv_dynamic_vp_state_destroy(struct anv_device *device,
1842 struct anv_object *object,
1843 VkObjectType obj_type)
1844{
1845 struct anv_dynamic_vp_state *state = (void *)object;
1846
1847 assert(obj_type == VK_OBJECT_TYPE_DYNAMIC_VP_STATE);
1848
1849 anv_state_pool_free(&device->dynamic_state_pool, state->sf_clip_vp);
1850 anv_state_pool_free(&device->dynamic_state_pool, state->cc_vp);
1851 anv_state_pool_free(&device->dynamic_state_pool, state->scissor);
1852
1853 anv_device_free(device, state);
1854}
1855
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001856VkResult anv_CreateDynamicViewportState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001857 VkDevice _device,
1858 const VkDynamicVpStateCreateInfo* pCreateInfo,
1859 VkDynamicVpState* pState)
1860{
1861 struct anv_device *device = (struct anv_device *) _device;
1862 struct anv_dynamic_vp_state *state;
1863
1864 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO);
1865
1866 state = anv_device_alloc(device, sizeof(*state), 8,
1867 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1868 if (state == NULL)
1869 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1870
Jason Ekstrand57153da2015-05-22 15:15:08 -07001871 state->base.destructor = anv_dynamic_vp_state_destroy;
1872
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001873 unsigned count = pCreateInfo->viewportAndScissorCount;
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001874 state->sf_clip_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001875 count * 64, 64);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001876 state->cc_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001877 count * 8, 32);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001878 state->scissor = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001879 count * 32, 32);
1880
1881 for (uint32_t i = 0; i < pCreateInfo->viewportAndScissorCount; i++) {
1882 const VkViewport *vp = &pCreateInfo->pViewports[i];
Jason Ekstrand1f1b26b2015-07-06 17:47:18 -07001883 const VkRect2D *s = &pCreateInfo->pScissors[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001884
1885 struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport = {
1886 .ViewportMatrixElementm00 = vp->width / 2,
1887 .ViewportMatrixElementm11 = vp->height / 2,
1888 .ViewportMatrixElementm22 = (vp->maxDepth - vp->minDepth) / 2,
1889 .ViewportMatrixElementm30 = vp->originX + vp->width / 2,
1890 .ViewportMatrixElementm31 = vp->originY + vp->height / 2,
1891 .ViewportMatrixElementm32 = (vp->maxDepth + vp->minDepth) / 2,
1892 .XMinClipGuardband = -1.0f,
1893 .XMaxClipGuardband = 1.0f,
1894 .YMinClipGuardband = -1.0f,
1895 .YMaxClipGuardband = 1.0f,
1896 .XMinViewPort = vp->originX,
1897 .XMaxViewPort = vp->originX + vp->width - 1,
1898 .YMinViewPort = vp->originY,
1899 .YMaxViewPort = vp->originY + vp->height - 1,
1900 };
1901
1902 struct GEN8_CC_VIEWPORT cc_viewport = {
1903 .MinimumDepth = vp->minDepth,
1904 .MaximumDepth = vp->maxDepth
1905 };
1906
1907 /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
1908 * ymax < ymin for empty clips. In case clip x, y, width height are all
1909 * 0, the clamps below produce 0 for xmin, ymin, xmax, ymax, which isn't
1910 * what we want. Just special case empty clips and produce a canonical
1911 * empty clip. */
1912 static const struct GEN8_SCISSOR_RECT empty_scissor = {
1913 .ScissorRectangleYMin = 1,
1914 .ScissorRectangleXMin = 1,
1915 .ScissorRectangleYMax = 0,
1916 .ScissorRectangleXMax = 0
1917 };
1918
1919 const int max = 0xffff;
1920 struct GEN8_SCISSOR_RECT scissor = {
1921 /* Do this math using int64_t so overflow gets clamped correctly. */
1922 .ScissorRectangleYMin = clamp_int64(s->offset.y, 0, max),
1923 .ScissorRectangleXMin = clamp_int64(s->offset.x, 0, max),
1924 .ScissorRectangleYMax = clamp_int64((uint64_t) s->offset.y + s->extent.height - 1, 0, max),
1925 .ScissorRectangleXMax = clamp_int64((uint64_t) s->offset.x + s->extent.width - 1, 0, max)
1926 };
1927
1928 GEN8_SF_CLIP_VIEWPORT_pack(NULL, state->sf_clip_vp.map + i * 64, &sf_clip_viewport);
1929 GEN8_CC_VIEWPORT_pack(NULL, state->cc_vp.map + i * 32, &cc_viewport);
1930
1931 if (s->extent.width <= 0 || s->extent.height <= 0) {
1932 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &empty_scissor);
1933 } else {
1934 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &scissor);
1935 }
1936 }
1937
1938 *pState = (VkDynamicVpState) state;
1939
1940 return VK_SUCCESS;
1941}
1942
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001943VkResult anv_CreateDynamicRasterState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001944 VkDevice _device,
1945 const VkDynamicRsStateCreateInfo* pCreateInfo,
1946 VkDynamicRsState* pState)
1947{
1948 struct anv_device *device = (struct anv_device *) _device;
1949 struct anv_dynamic_rs_state *state;
1950
1951 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_RS_STATE_CREATE_INFO);
1952
1953 state = anv_device_alloc(device, sizeof(*state), 8,
1954 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1955 if (state == NULL)
1956 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1957
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001958 struct GEN8_3DSTATE_SF sf = {
1959 GEN8_3DSTATE_SF_header,
1960 .LineWidth = pCreateInfo->lineWidth,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001961 };
1962
1963 GEN8_3DSTATE_SF_pack(NULL, state->state_sf, &sf);
1964
Kristian Høgsberg99883772015-05-26 09:40:10 -07001965 bool enable_bias = pCreateInfo->depthBias != 0.0f ||
1966 pCreateInfo->slopeScaledDepthBias != 0.0f;
1967 struct GEN8_3DSTATE_RASTER raster = {
1968 .GlobalDepthOffsetEnableSolid = enable_bias,
1969 .GlobalDepthOffsetEnableWireframe = enable_bias,
1970 .GlobalDepthOffsetEnablePoint = enable_bias,
1971 .GlobalDepthOffsetConstant = pCreateInfo->depthBias,
1972 .GlobalDepthOffsetScale = pCreateInfo->slopeScaledDepthBias,
1973 .GlobalDepthOffsetClamp = pCreateInfo->depthBiasClamp
1974 };
1975
1976 GEN8_3DSTATE_RASTER_pack(NULL, state->state_raster, &raster);
1977
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001978 *pState = (VkDynamicRsState) state;
1979
1980 return VK_SUCCESS;
1981}
1982
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001983VkResult anv_CreateDynamicColorBlendState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001984 VkDevice _device,
1985 const VkDynamicCbStateCreateInfo* pCreateInfo,
1986 VkDynamicCbState* pState)
1987{
1988 struct anv_device *device = (struct anv_device *) _device;
1989 struct anv_dynamic_cb_state *state;
1990
1991 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_CB_STATE_CREATE_INFO);
1992
1993 state = anv_device_alloc(device, sizeof(*state), 8,
1994 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1995 if (state == NULL)
1996 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1997
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07001998 struct GEN8_COLOR_CALC_STATE color_calc_state = {
1999 .BlendConstantColorRed = pCreateInfo->blendConst[0],
2000 .BlendConstantColorGreen = pCreateInfo->blendConst[1],
2001 .BlendConstantColorBlue = pCreateInfo->blendConst[2],
2002 .BlendConstantColorAlpha = pCreateInfo->blendConst[3]
2003 };
2004
2005 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2006
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002007 *pState = (VkDynamicCbState) state;
2008
2009 return VK_SUCCESS;
2010}
2011
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002012VkResult anv_CreateDynamicDepthStencilState(
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002013 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002014 const VkDynamicDsStateCreateInfo* pCreateInfo,
2015 VkDynamicDsState* pState)
2016{
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002017 struct anv_device *device = (struct anv_device *) _device;
2018 struct anv_dynamic_ds_state *state;
2019
2020 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_DS_STATE_CREATE_INFO);
2021
2022 state = anv_device_alloc(device, sizeof(*state), 8,
2023 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2024 if (state == NULL)
2025 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2026
2027 struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
2028 GEN8_3DSTATE_WM_DEPTH_STENCIL_header,
2029
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002030 /* Is this what we need to do? */
2031 .StencilBufferWriteEnable = pCreateInfo->stencilWriteMask != 0,
2032
Jason Ekstrand251aea82015-06-03 16:59:13 -07002033 .StencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2034 .StencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002035
Jason Ekstrand251aea82015-06-03 16:59:13 -07002036 .BackfaceStencilTestMask = pCreateInfo->stencilReadMask & 0xff,
2037 .BackfaceStencilWriteMask = pCreateInfo->stencilWriteMask & 0xff,
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002038 };
2039
2040 GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL, state->state_wm_depth_stencil,
2041 &wm_depth_stencil);
2042
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002043 struct GEN8_COLOR_CALC_STATE color_calc_state = {
2044 .StencilReferenceValue = pCreateInfo->stencilFrontRef,
2045 .BackFaceStencilReferenceValue = pCreateInfo->stencilBackRef
2046 };
2047
2048 GEN8_COLOR_CALC_STATE_pack(NULL, state->state_color_calc, &color_calc_state);
2049
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002050 *pState = (VkDynamicDsState) state;
2051
2052 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002053}
2054
2055// Command buffer functions
2056
Jason Ekstrand57153da2015-05-22 15:15:08 -07002057static void
2058anv_cmd_buffer_destroy(struct anv_device *device,
2059 struct anv_object *object,
2060 VkObjectType obj_type)
2061{
2062 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) object;
2063
2064 assert(obj_type == VK_OBJECT_TYPE_COMMAND_BUFFER);
2065
Jason Ekstrand2dc0f7f2015-05-28 13:08:21 -07002066 /* Destroy all of the batch buffers */
2067 struct anv_batch_bo *bbo = cmd_buffer->last_batch_bo;
Jason Ekstrand999b56c2015-06-09 11:40:22 -07002068 while (bbo) {
Jason Ekstrand2dc0f7f2015-05-28 13:08:21 -07002069 struct anv_batch_bo *prev = bbo->prev_batch_bo;
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002070 anv_batch_bo_destroy(bbo, device);
Jason Ekstrand2dc0f7f2015-05-28 13:08:21 -07002071 bbo = prev;
2072 }
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002073 anv_reloc_list_finish(&cmd_buffer->batch.relocs, device);
Jason Ekstrand2dc0f7f2015-05-28 13:08:21 -07002074
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002075 /* Destroy all of the surface state buffers */
2076 bbo = cmd_buffer->surface_batch_bo;
Jason Ekstrand999b56c2015-06-09 11:40:22 -07002077 while (bbo) {
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002078 struct anv_batch_bo *prev = bbo->prev_batch_bo;
2079 anv_batch_bo_destroy(bbo, device);
2080 bbo = prev;
2081 }
Jason Ekstrand403266b2015-05-25 17:38:15 -07002082 anv_reloc_list_finish(&cmd_buffer->surface_relocs, device);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002083
Jason Ekstrand57153da2015-05-22 15:15:08 -07002084 anv_state_stream_finish(&cmd_buffer->surface_state_stream);
2085 anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);
Jason Ekstrand57153da2015-05-22 15:15:08 -07002086 anv_device_free(device, cmd_buffer->exec2_objects);
2087 anv_device_free(device, cmd_buffer->exec2_bos);
2088 anv_device_free(device, cmd_buffer);
2089}
2090
/* Batch extend callback: called when the current batch bo is full.
 *
 * Allocates a fresh batch bo, emits an MI_BATCH_BUFFER_START in the old
 * bo that jumps to the new one, and repoints the anv_batch at the new
 * bo.  Returns VK_SUCCESS or the allocation error.
 */
static VkResult
anv_cmd_buffer_chain_batch(struct anv_batch *batch, void *_data)
{
   struct anv_cmd_buffer *cmd_buffer = _data;

   struct anv_batch_bo *new_bbo, *old_bbo = cmd_buffer->last_batch_bo;

   VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
   if (result != VK_SUCCESS)
      return result;

   /* We set the end of the batch a little short so we would be sure we
    * have room for the chaining command. Since we're about to emit the
    * chaining command, let's set it back where it should go.
    */
   batch->end += GEN8_MI_BATCH_BUFFER_START_length * 4;
   assert(batch->end == old_bbo->bo.map + old_bbo->bo.size);

   anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_START,
      GEN8_MI_BATCH_BUFFER_START_header,
      ._2ndLevelBatchBuffer = _1stlevelbatch,
      .AddressSpaceIndicator = ASI_PPGTT,
      .BatchBufferStartAddress = { &new_bbo->bo, 0 },
   );

   /* Pad out to a 2-dword aligned boundary with zeros */
   if ((uintptr_t)batch->next % 8 != 0) {
      *(uint32_t *)batch->next = 0;
      batch->next += 4;
   }

   /* Record how far we actually wrote into the old bo, then make the
    * new bo (minus the reserved chaining-command tail) the live batch. */
   anv_batch_bo_finish(cmd_buffer->last_batch_bo, batch);

   new_bbo->prev_batch_bo = old_bbo;
   cmd_buffer->last_batch_bo = new_bbo;

   anv_batch_bo_start(new_bbo, batch, GEN8_MI_BATCH_BUFFER_START_length * 4);

   return VK_SUCCESS;
}
2131
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002132VkResult anv_CreateCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002133 VkDevice _device,
2134 const VkCmdBufferCreateInfo* pCreateInfo,
2135 VkCmdBuffer* pCmdBuffer)
2136{
2137 struct anv_device *device = (struct anv_device *) _device;
2138 struct anv_cmd_buffer *cmd_buffer;
2139 VkResult result;
2140
Jason Ekstrande19d6be2015-07-08 10:53:32 -07002141 assert(pCreateInfo->level == VK_CMD_BUFFER_LEVEL_PRIMARY);
2142
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002143 cmd_buffer = anv_device_alloc(device, sizeof(*cmd_buffer), 8,
2144 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2145 if (cmd_buffer == NULL)
2146 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2147
Jason Ekstrand57153da2015-05-22 15:15:08 -07002148 cmd_buffer->base.destructor = anv_cmd_buffer_destroy;
2149
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002150 cmd_buffer->device = device;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002151 cmd_buffer->rs_state = NULL;
2152 cmd_buffer->vp_state = NULL;
Kristian Høgsberg Kristensen5744d172015-06-02 22:51:42 -07002153 cmd_buffer->cb_state = NULL;
Jason Ekstrand5d4b6a02015-06-09 16:27:55 -07002154 cmd_buffer->ds_state = NULL;
Jason Ekstrand7fbed522015-07-07 15:11:56 -07002155 memset(&cmd_buffer->state_vf, 0, sizeof(cmd_buffer->state_vf));
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002156 memset(&cmd_buffer->descriptors, 0, sizeof(cmd_buffer->descriptors));
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002157
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002158 result = anv_batch_bo_create(device, &cmd_buffer->last_batch_bo);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002159 if (result != VK_SUCCESS)
2160 goto fail;
2161
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002162 result = anv_reloc_list_init(&cmd_buffer->batch.relocs, device);
2163 if (result != VK_SUCCESS)
2164 goto fail_batch_bo;
2165
2166 cmd_buffer->batch.device = device;
2167 cmd_buffer->batch.extend_cb = anv_cmd_buffer_chain_batch;
2168 cmd_buffer->batch.user_data = cmd_buffer;
2169
2170 anv_batch_bo_start(cmd_buffer->last_batch_bo, &cmd_buffer->batch,
2171 GEN8_MI_BATCH_BUFFER_START_length * 4);
2172
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002173 result = anv_batch_bo_create(device, &cmd_buffer->surface_batch_bo);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002174 if (result != VK_SUCCESS)
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002175 goto fail_batch_relocs;
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002176 cmd_buffer->surface_batch_bo->first_reloc = 0;
2177
2178 result = anv_reloc_list_init(&cmd_buffer->surface_relocs, device);
2179 if (result != VK_SUCCESS)
2180 goto fail_ss_batch_bo;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002181
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002182 /* Start surface_next at 1 so surface offset 0 is invalid. */
2183 cmd_buffer->surface_next = 1;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002184
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002185 cmd_buffer->exec2_objects = NULL;
2186 cmd_buffer->exec2_bos = NULL;
2187 cmd_buffer->exec2_array_length = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002188
2189 anv_state_stream_init(&cmd_buffer->surface_state_stream,
2190 &device->surface_state_block_pool);
Kristian Høgsberga1ec7892015-05-13 13:51:08 -07002191 anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002192 &device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002193
2194 cmd_buffer->dirty = 0;
2195 cmd_buffer->vb_dirty = 0;
Jason Ekstrand22513052015-05-30 10:07:29 -07002196 cmd_buffer->descriptors_dirty = 0;
Jason Ekstrandae8c93e2015-05-25 17:08:11 -07002197 cmd_buffer->pipeline = NULL;
Kristian Høgsberg Kristensen5a317ef2015-05-27 21:45:23 -07002198 cmd_buffer->vp_state = NULL;
2199 cmd_buffer->rs_state = NULL;
2200 cmd_buffer->ds_state = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002201
2202 *pCmdBuffer = (VkCmdBuffer) cmd_buffer;
2203
2204 return VK_SUCCESS;
2205
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002206 fail_ss_batch_bo:
2207 anv_batch_bo_destroy(cmd_buffer->surface_batch_bo, device);
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002208 fail_batch_relocs:
2209 anv_reloc_list_finish(&cmd_buffer->batch.relocs, device);
2210 fail_batch_bo:
2211 anv_batch_bo_destroy(cmd_buffer->last_batch_bo, device);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002212 fail:
2213 anv_device_free(device, cmd_buffer);
2214
2215 return result;
2216}
2217
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002218static void
2219anv_cmd_buffer_emit_state_base_address(struct anv_cmd_buffer *cmd_buffer)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002220{
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002221 struct anv_device *device = cmd_buffer->device;
Kristian Høgsberg Kristensen9b9f9732015-06-19 15:41:30 -07002222 struct anv_bo *scratch_bo = NULL;
2223
2224 cmd_buffer->scratch_size = device->scratch_block_pool.size;
2225 if (cmd_buffer->scratch_size > 0)
2226 scratch_bo = &device->scratch_block_pool.bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002227
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002228 anv_batch_emit(&cmd_buffer->batch, GEN8_STATE_BASE_ADDRESS,
Kristian Høgsberg Kristensen9b9f9732015-06-19 15:41:30 -07002229 .GeneralStateBaseAddress = { scratch_bo, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002230 .GeneralStateMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002231 .GeneralStateBaseAddressModifyEnable = true,
2232 .GeneralStateBufferSize = 0xfffff,
2233 .GeneralStateBufferSizeModifyEnable = true,
2234
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002235 .SurfaceStateBaseAddress = { &cmd_buffer->surface_batch_bo->bo, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002236 .SurfaceStateMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002237 .SurfaceStateBaseAddressModifyEnable = true,
2238
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002239 .DynamicStateBaseAddress = { &device->dynamic_state_block_pool.bo, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002240 .DynamicStateMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002241 .DynamicStateBaseAddressModifyEnable = true,
2242 .DynamicStateBufferSize = 0xfffff,
2243 .DynamicStateBufferSizeModifyEnable = true,
2244
2245 .IndirectObjectBaseAddress = { NULL, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002246 .IndirectObjectMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002247 .IndirectObjectBaseAddressModifyEnable = true,
2248 .IndirectObjectBufferSize = 0xfffff,
2249 .IndirectObjectBufferSizeModifyEnable = true,
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002250
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002251 .InstructionBaseAddress = { &device->instruction_block_pool.bo, 0 },
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002252 .InstructionMemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002253 .InstructionBaseAddressModifyEnable = true,
2254 .InstructionBufferSize = 0xfffff,
2255 .InstructionBuffersizeModifyEnable = true);
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002256}
2257
2258VkResult anv_BeginCommandBuffer(
2259 VkCmdBuffer cmdBuffer,
2260 const VkCmdBufferBeginInfo* pBeginInfo)
2261{
2262 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2263
Jason Ekstrand9ffc1be2015-05-28 15:34:08 -07002264 anv_cmd_buffer_emit_state_base_address(cmd_buffer);
Kristian Høgsberg Kristensen7637b022015-06-11 15:21:49 -07002265 cmd_buffer->current_pipeline = UINT32_MAX;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002266
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002267 return VK_SUCCESS;
2268}
2269
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002270static VkResult
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002271anv_cmd_buffer_add_bo(struct anv_cmd_buffer *cmd_buffer,
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002272 struct anv_bo *bo,
2273 struct drm_i915_gem_relocation_entry *relocs,
2274 size_t num_relocs)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002275{
2276 struct drm_i915_gem_exec_object2 *obj;
2277
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002278 if (bo->index < cmd_buffer->bo_count &&
2279 cmd_buffer->exec2_bos[bo->index] == bo)
2280 return VK_SUCCESS;
2281
2282 if (cmd_buffer->bo_count >= cmd_buffer->exec2_array_length) {
2283 uint32_t new_len = cmd_buffer->exec2_objects ?
2284 cmd_buffer->exec2_array_length * 2 : 64;
2285
2286 struct drm_i915_gem_exec_object2 *new_objects =
2287 anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_objects),
2288 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
2289 if (new_objects == NULL)
2290 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2291
2292 struct anv_bo **new_bos =
2293 anv_device_alloc(cmd_buffer->device, new_len * sizeof(*new_bos),
2294 8, VK_SYSTEM_ALLOC_TYPE_INTERNAL);
2295 if (new_objects == NULL) {
2296 anv_device_free(cmd_buffer->device, new_objects);
2297 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2298 }
2299
2300 if (cmd_buffer->exec2_objects) {
2301 memcpy(new_objects, cmd_buffer->exec2_objects,
2302 cmd_buffer->bo_count * sizeof(*new_objects));
2303 memcpy(new_bos, cmd_buffer->exec2_bos,
2304 cmd_buffer->bo_count * sizeof(*new_bos));
2305 }
2306
2307 cmd_buffer->exec2_objects = new_objects;
2308 cmd_buffer->exec2_bos = new_bos;
2309 cmd_buffer->exec2_array_length = new_len;
2310 }
2311
2312 assert(cmd_buffer->bo_count < cmd_buffer->exec2_array_length);
2313
2314 bo->index = cmd_buffer->bo_count++;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002315 obj = &cmd_buffer->exec2_objects[bo->index];
2316 cmd_buffer->exec2_bos[bo->index] = bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002317
2318 obj->handle = bo->gem_handle;
2319 obj->relocation_count = 0;
2320 obj->relocs_ptr = 0;
2321 obj->alignment = 0;
2322 obj->offset = bo->offset;
2323 obj->flags = 0;
2324 obj->rsvd1 = 0;
2325 obj->rsvd2 = 0;
2326
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002327 if (relocs) {
2328 obj->relocation_count = num_relocs;
2329 obj->relocs_ptr = (uintptr_t) relocs;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002330 }
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002331
2332 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002333}
2334
2335static void
2336anv_cmd_buffer_add_validate_bos(struct anv_cmd_buffer *cmd_buffer,
2337 struct anv_reloc_list *list)
2338{
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002339 for (size_t i = 0; i < list->num_relocs; i++)
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002340 anv_cmd_buffer_add_bo(cmd_buffer, list->reloc_bos[i], NULL, 0);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002341}
2342
2343static void
2344anv_cmd_buffer_process_relocs(struct anv_cmd_buffer *cmd_buffer,
2345 struct anv_reloc_list *list)
2346{
2347 struct anv_bo *bo;
2348
2349 /* If the kernel supports I915_EXEC_NO_RELOC, it will compare offset in
2350 * struct drm_i915_gem_exec_object2 against the bos current offset and if
2351 * all bos haven't moved it will skip relocation processing alltogether.
2352 * If I915_EXEC_NO_RELOC is not supported, the kernel ignores the incoming
2353 * value of offset so we can set it either way. For that to work we need
2354 * to make sure all relocs use the same presumed offset.
2355 */
2356
2357 for (size_t i = 0; i < list->num_relocs; i++) {
2358 bo = list->reloc_bos[i];
2359 if (bo->offset != list->relocs[i].presumed_offset)
2360 cmd_buffer->need_reloc = true;
2361
2362 list->relocs[i].target_handle = bo->index;
2363 }
2364}
2365
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002366VkResult anv_EndCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002367 VkCmdBuffer cmdBuffer)
2368{
2369 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2370 struct anv_device *device = cmd_buffer->device;
2371 struct anv_batch *batch = &cmd_buffer->batch;
2372
2373 anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_END);
2374
2375 /* Round batch up to an even number of dwords. */
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002376 if ((batch->next - batch->start) & 4)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002377 anv_batch_emit(batch, GEN8_MI_NOOP);
2378
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002379 anv_batch_bo_finish(cmd_buffer->last_batch_bo, &cmd_buffer->batch);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002380 cmd_buffer->surface_batch_bo->num_relocs =
2381 cmd_buffer->surface_relocs.num_relocs - cmd_buffer->surface_batch_bo->first_reloc;
2382 cmd_buffer->surface_batch_bo->length = cmd_buffer->surface_next;
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002383
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002384 cmd_buffer->bo_count = 0;
2385 cmd_buffer->need_reloc = false;
2386
2387 /* Lock for access to bo->index. */
2388 pthread_mutex_lock(&device->mutex);
2389
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002390 /* Add surface state bos first so we can add them with their relocs. */
2391 for (struct anv_batch_bo *bbo = cmd_buffer->surface_batch_bo;
2392 bbo != NULL; bbo = bbo->prev_batch_bo) {
2393 anv_cmd_buffer_add_bo(cmd_buffer, &bbo->bo,
2394 &cmd_buffer->surface_relocs.relocs[bbo->first_reloc],
2395 bbo->num_relocs);
2396 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002397
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002398 /* Add all of the BOs referenced by surface state */
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002399 anv_cmd_buffer_add_validate_bos(cmd_buffer, &cmd_buffer->surface_relocs);
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002400
2401 /* Add all but the first batch BO */
2402 struct anv_batch_bo *batch_bo = cmd_buffer->last_batch_bo;
2403 while (batch_bo->prev_batch_bo) {
2404 anv_cmd_buffer_add_bo(cmd_buffer, &batch_bo->bo,
2405 &batch->relocs.relocs[batch_bo->first_reloc],
2406 batch_bo->num_relocs);
2407 batch_bo = batch_bo->prev_batch_bo;
2408 }
2409
2410 /* Add everything referenced by the batches */
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002411 anv_cmd_buffer_add_validate_bos(cmd_buffer, &batch->relocs);
Jason Ekstrand730ca0e2015-05-28 10:20:18 -07002412
2413 /* Add the first batch bo last */
2414 assert(batch_bo->prev_batch_bo == NULL && batch_bo->first_reloc == 0);
2415 anv_cmd_buffer_add_bo(cmd_buffer, &batch_bo->bo,
2416 &batch->relocs.relocs[batch_bo->first_reloc],
2417 batch_bo->num_relocs);
2418 assert(batch_bo->bo.index == cmd_buffer->bo_count - 1);
2419
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002420 anv_cmd_buffer_process_relocs(cmd_buffer, &cmd_buffer->surface_relocs);
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002421 anv_cmd_buffer_process_relocs(cmd_buffer, &batch->relocs);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002422
2423 cmd_buffer->execbuf.buffers_ptr = (uintptr_t) cmd_buffer->exec2_objects;
2424 cmd_buffer->execbuf.buffer_count = cmd_buffer->bo_count;
2425 cmd_buffer->execbuf.batch_start_offset = 0;
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002426 cmd_buffer->execbuf.batch_len = batch->next - batch->start;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002427 cmd_buffer->execbuf.cliprects_ptr = 0;
2428 cmd_buffer->execbuf.num_cliprects = 0;
2429 cmd_buffer->execbuf.DR1 = 0;
2430 cmd_buffer->execbuf.DR4 = 0;
2431
2432 cmd_buffer->execbuf.flags = I915_EXEC_HANDLE_LUT;
2433 if (!cmd_buffer->need_reloc)
2434 cmd_buffer->execbuf.flags |= I915_EXEC_NO_RELOC;
2435 cmd_buffer->execbuf.flags |= I915_EXEC_RENDER;
2436 cmd_buffer->execbuf.rsvd1 = device->context_id;
2437 cmd_buffer->execbuf.rsvd2 = 0;
2438
2439 pthread_mutex_unlock(&device->mutex);
2440
2441 return VK_SUCCESS;
2442}
2443
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002444VkResult anv_ResetCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002445 VkCmdBuffer cmdBuffer)
2446{
2447 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2448
Jason Ekstrandda8f1482015-05-27 11:42:55 -07002449 /* Delete all but the first batch bo */
2450 while (cmd_buffer->last_batch_bo->prev_batch_bo) {
2451 struct anv_batch_bo *prev = cmd_buffer->last_batch_bo->prev_batch_bo;
2452 anv_batch_bo_destroy(cmd_buffer->last_batch_bo, cmd_buffer->device);
2453 cmd_buffer->last_batch_bo = prev;
2454 }
2455 assert(cmd_buffer->last_batch_bo->prev_batch_bo == NULL);
2456
2457 cmd_buffer->batch.relocs.num_relocs = 0;
2458 anv_batch_bo_start(cmd_buffer->last_batch_bo, &cmd_buffer->batch,
2459 GEN8_MI_BATCH_BUFFER_START_length * 4);
2460
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002461 /* Delete all but the first batch bo */
2462 while (cmd_buffer->surface_batch_bo->prev_batch_bo) {
2463 struct anv_batch_bo *prev = cmd_buffer->surface_batch_bo->prev_batch_bo;
2464 anv_batch_bo_destroy(cmd_buffer->surface_batch_bo, cmd_buffer->device);
2465 cmd_buffer->surface_batch_bo = prev;
2466 }
2467 assert(cmd_buffer->surface_batch_bo->prev_batch_bo == NULL);
2468
2469 cmd_buffer->surface_next = 1;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002470 cmd_buffer->surface_relocs.num_relocs = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002471
Jason Ekstrand5d4b6a02015-06-09 16:27:55 -07002472 cmd_buffer->rs_state = NULL;
2473 cmd_buffer->vp_state = NULL;
2474 cmd_buffer->cb_state = NULL;
2475 cmd_buffer->ds_state = NULL;
2476
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002477 return VK_SUCCESS;
2478}
2479
2480// Command buffer building functions
2481
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002482void anv_CmdBindPipeline(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002483 VkCmdBuffer cmdBuffer,
2484 VkPipelineBindPoint pipelineBindPoint,
2485 VkPipeline _pipeline)
2486{
2487 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002488 struct anv_pipeline *pipeline = (struct anv_pipeline *) _pipeline;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002489
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002490 switch (pipelineBindPoint) {
2491 case VK_PIPELINE_BIND_POINT_COMPUTE:
2492 cmd_buffer->compute_pipeline = pipeline;
2493 cmd_buffer->compute_dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
2494 break;
2495
2496 case VK_PIPELINE_BIND_POINT_GRAPHICS:
2497 cmd_buffer->pipeline = pipeline;
2498 cmd_buffer->vb_dirty |= pipeline->vb_used;
2499 cmd_buffer->dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
2500 break;
2501
2502 default:
2503 assert(!"invalid bind point");
2504 break;
2505 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002506}
2507
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002508void anv_CmdBindDynamicStateObject(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002509 VkCmdBuffer cmdBuffer,
2510 VkStateBindPoint stateBindPoint,
2511 VkDynamicStateObject dynamicState)
2512{
2513 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002514
2515 switch (stateBindPoint) {
2516 case VK_STATE_BIND_POINT_VIEWPORT:
Kristian Høgsberg Kristensene7edde62015-06-11 15:04:09 -07002517 cmd_buffer->vp_state = (struct anv_dynamic_vp_state *) dynamicState;
2518 cmd_buffer->dirty |= ANV_CMD_BUFFER_VP_DIRTY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002519 break;
2520 case VK_STATE_BIND_POINT_RASTER:
2521 cmd_buffer->rs_state = (struct anv_dynamic_rs_state *) dynamicState;
2522 cmd_buffer->dirty |= ANV_CMD_BUFFER_RS_DIRTY;
2523 break;
2524 case VK_STATE_BIND_POINT_COLOR_BLEND:
Kristian Høgsberga1d30f82015-05-26 17:12:18 -07002525 cmd_buffer->cb_state = (struct anv_dynamic_cb_state *) dynamicState;
2526 cmd_buffer->dirty |= ANV_CMD_BUFFER_CB_DIRTY;
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002527 break;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002528 case VK_STATE_BIND_POINT_DEPTH_STENCIL:
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002529 cmd_buffer->ds_state = (struct anv_dynamic_ds_state *) dynamicState;
2530 cmd_buffer->dirty |= ANV_CMD_BUFFER_DS_DIRTY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002531 break;
2532 default:
2533 break;
2534 };
2535}
2536
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002537static struct anv_state
2538anv_cmd_buffer_alloc_surface_state(struct anv_cmd_buffer *cmd_buffer,
2539 uint32_t size, uint32_t alignment)
2540{
2541 struct anv_state state;
2542
Chad Versace55752fe2015-06-26 15:07:59 -07002543 state.offset = align_u32(cmd_buffer->surface_next, alignment);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002544 if (state.offset + size > cmd_buffer->surface_batch_bo->bo.size)
2545 return (struct anv_state) { 0 };
2546
2547 state.map = cmd_buffer->surface_batch_bo->bo.map + state.offset;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002548 state.alloc_size = size;
2549 cmd_buffer->surface_next = state.offset + size;
2550
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002551 assert(state.offset + size <= cmd_buffer->surface_batch_bo->bo.size);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002552
2553 return state;
2554}
2555
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002556static VkResult
2557anv_cmd_buffer_new_surface_state_bo(struct anv_cmd_buffer *cmd_buffer)
2558{
2559 struct anv_batch_bo *new_bbo, *old_bbo = cmd_buffer->surface_batch_bo;
2560
2561 /* Finish off the old buffer */
2562 old_bbo->num_relocs =
2563 cmd_buffer->surface_relocs.num_relocs - old_bbo->first_reloc;
2564 old_bbo->length = cmd_buffer->surface_next;
2565
2566 VkResult result = anv_batch_bo_create(cmd_buffer->device, &new_bbo);
2567 if (result != VK_SUCCESS)
2568 return result;
2569
2570 new_bbo->first_reloc = cmd_buffer->surface_relocs.num_relocs;
2571 cmd_buffer->surface_next = 1;
2572
2573 new_bbo->prev_batch_bo = old_bbo;
2574 cmd_buffer->surface_batch_bo = new_bbo;
2575
2576 /* Re-emit state base addresses so we get the new surface state base
2577 * address before we start emitting binding tables etc.
2578 */
2579 anv_cmd_buffer_emit_state_base_address(cmd_buffer);
2580
Jason Ekstrande497ac22015-05-30 18:04:48 -07002581 /* It seems like just changing the state base addresses isn't enough.
2582 * Invalidating the cache seems to be enough to cause things to
2583 * propagate. However, I'm not 100% sure what we're supposed to do.
Jason Ekstrand33cccbb2015-05-30 08:02:52 -07002584 */
2585 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
2586 .TextureCacheInvalidationEnable = true);
2587
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002588 return VK_SUCCESS;
2589}
2590
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002591void anv_CmdBindDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002592 VkCmdBuffer cmdBuffer,
2593 VkPipelineBindPoint pipelineBindPoint,
Jason Ekstrand435b0622015-07-07 17:06:10 -07002594 VkPipelineLayout _layout,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002595 uint32_t firstSet,
2596 uint32_t setCount,
2597 const VkDescriptorSet* pDescriptorSets,
2598 uint32_t dynamicOffsetCount,
2599 const uint32_t* pDynamicOffsets)
2600{
2601 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrand435b0622015-07-07 17:06:10 -07002602 struct anv_pipeline_layout *layout = (struct anv_pipeline_layout *) _layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002603 struct anv_descriptor_set *set;
2604 struct anv_descriptor_set_layout *set_layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002605
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002606 assert(firstSet + setCount < MAX_SETS);
2607
2608 uint32_t dynamic_slot = 0;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002609 for (uint32_t i = 0; i < setCount; i++) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002610 set = (struct anv_descriptor_set *) pDescriptorSets[i];
2611 set_layout = layout->set[firstSet + i].layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002612
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002613 cmd_buffer->descriptors[firstSet + i].set = set;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002614
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002615 assert(set_layout->num_dynamic_buffers <
2616 ARRAY_SIZE(cmd_buffer->descriptors[0].dynamic_offsets));
2617 memcpy(cmd_buffer->descriptors[firstSet + i].dynamic_offsets,
2618 pDynamicOffsets + dynamic_slot,
2619 set_layout->num_dynamic_buffers * sizeof(*pDynamicOffsets));
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002620
Jason Ekstrand22513052015-05-30 10:07:29 -07002621 cmd_buffer->descriptors_dirty |= set_layout->shader_stages;
2622
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002623 dynamic_slot += set_layout->num_dynamic_buffers;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002624 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002625}
2626
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002627void anv_CmdBindIndexBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002628 VkCmdBuffer cmdBuffer,
2629 VkBuffer _buffer,
2630 VkDeviceSize offset,
2631 VkIndexType indexType)
2632{
2633 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2634 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
2635
2636 static const uint32_t vk_to_gen_index_type[] = {
Kristian Høgsberg Kristensen5caa4082015-05-31 22:35:11 -07002637 [VK_INDEX_TYPE_UINT16] = INDEX_WORD,
2638 [VK_INDEX_TYPE_UINT32] = INDEX_DWORD,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002639 };
2640
Jason Ekstrand7fbed522015-07-07 15:11:56 -07002641 struct GEN8_3DSTATE_VF vf = {
2642 GEN8_3DSTATE_VF_header,
2643 .CutIndex = (indexType == VK_INDEX_TYPE_UINT16) ? UINT16_MAX : UINT32_MAX,
2644 };
2645 GEN8_3DSTATE_VF_pack(NULL, cmd_buffer->state_vf, &vf);
2646
2647 cmd_buffer->dirty |= ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY;
2648
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002649 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_INDEX_BUFFER,
2650 .IndexFormat = vk_to_gen_index_type[indexType],
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002651 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002652 .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002653 .BufferSize = buffer->size - offset);
2654}
2655
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002656void anv_CmdBindVertexBuffers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002657 VkCmdBuffer cmdBuffer,
2658 uint32_t startBinding,
2659 uint32_t bindingCount,
2660 const VkBuffer* pBuffers,
2661 const VkDeviceSize* pOffsets)
2662{
2663 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002664 struct anv_vertex_binding *vb = cmd_buffer->vertex_bindings;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002665
2666 /* We have to defer setting up vertex buffer since we need the buffer
2667 * stride from the pipeline. */
2668
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002669 assert(startBinding + bindingCount < MAX_VBS);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002670 for (uint32_t i = 0; i < bindingCount; i++) {
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002671 vb[startBinding + i].buffer = (struct anv_buffer *) pBuffers[i];
2672 vb[startBinding + i].offset = pOffsets[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002673 cmd_buffer->vb_dirty |= 1 << (startBinding + i);
2674 }
2675}
2676
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002677static VkResult
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002678cmd_buffer_emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002679 unsigned stage, struct anv_state *bt_state)
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002680{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002681 struct anv_pipeline_layout *layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002682 uint32_t color_attachments, bias, size;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002683
2684 if (stage == VK_SHADER_STAGE_COMPUTE)
2685 layout = cmd_buffer->compute_pipeline->layout;
2686 else
2687 layout = cmd_buffer->pipeline->layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002688
2689 if (stage == VK_SHADER_STAGE_FRAGMENT) {
2690 bias = MAX_RTS;
2691 color_attachments = cmd_buffer->framebuffer->color_attachment_count;
2692 } else {
2693 bias = 0;
2694 color_attachments = 0;
2695 }
2696
2697 /* This is a little awkward: layout can be NULL but we still have to
2698 * allocate and set a binding table for the PS stage for render
2699 * targets. */
2700 uint32_t surface_count = layout ? layout->stage[stage].surface_count : 0;
2701
2702 if (color_attachments + surface_count == 0)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002703 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002704
2705 size = (bias + surface_count) * sizeof(uint32_t);
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002706 *bt_state = anv_cmd_buffer_alloc_surface_state(cmd_buffer, size, 32);
2707 uint32_t *bt_map = bt_state->map;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002708
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002709 if (bt_state->map == NULL)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002710 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2711
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002712 for (uint32_t ca = 0; ca < color_attachments; ca++) {
2713 const struct anv_surface_view *view =
2714 cmd_buffer->framebuffer->color_attachments[ca];
2715
2716 struct anv_state state =
2717 anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);
2718
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002719 if (state.map == NULL)
2720 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2721
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002722 memcpy(state.map, view->surface_state.map, 64);
2723
2724 /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
2725 *(uint64_t *)(state.map + 8 * 4) =
2726 anv_reloc_list_add(&cmd_buffer->surface_relocs,
2727 cmd_buffer->device,
2728 state.offset + 8 * 4,
2729 view->bo, view->offset);
2730
2731 bt_map[ca] = state.offset;
2732 }
2733
2734 if (layout == NULL)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002735 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002736
2737 for (uint32_t set = 0; set < layout->num_sets; set++) {
2738 struct anv_descriptor_set_binding *d = &cmd_buffer->descriptors[set];
2739 struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;
2740 struct anv_descriptor_slot *surface_slots =
2741 set_layout->stage[stage].surface_start;
2742
2743 uint32_t start = bias + layout->set[set].surface_start[stage];
2744
2745 for (uint32_t b = 0; b < set_layout->stage[stage].surface_count; b++) {
2746 struct anv_surface_view *view =
2747 d->set->descriptors[surface_slots[b].index].view;
2748
Jason Ekstrand03ffa9c2015-05-29 20:43:10 -07002749 if (!view)
2750 continue;
2751
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002752 struct anv_state state =
2753 anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);
2754
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002755 if (state.map == NULL)
2756 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2757
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002758 uint32_t offset;
2759 if (surface_slots[b].dynamic_slot >= 0) {
2760 uint32_t dynamic_offset =
2761 d->dynamic_offsets[surface_slots[b].dynamic_slot];
2762
2763 offset = view->offset + dynamic_offset;
2764 fill_buffer_surface_state(state.map, view->format, offset,
2765 view->range - dynamic_offset);
2766 } else {
2767 offset = view->offset;
2768 memcpy(state.map, view->surface_state.map, 64);
2769 }
2770
2771 /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
2772 *(uint64_t *)(state.map + 8 * 4) =
2773 anv_reloc_list_add(&cmd_buffer->surface_relocs,
2774 cmd_buffer->device,
2775 state.offset + 8 * 4,
2776 view->bo, offset);
2777
2778 bt_map[start + b] = state.offset;
2779 }
2780 }
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002781
2782 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002783}
2784
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002785static VkResult
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002786cmd_buffer_emit_samplers(struct anv_cmd_buffer *cmd_buffer,
2787 unsigned stage, struct anv_state *state)
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002788{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002789 struct anv_pipeline_layout *layout;
2790 uint32_t sampler_count;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002791
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002792 if (stage == VK_SHADER_STAGE_COMPUTE)
2793 layout = cmd_buffer->compute_pipeline->layout;
2794 else
2795 layout = cmd_buffer->pipeline->layout;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002796
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002797 sampler_count = layout ? layout->stage[stage].sampler_count : 0;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002798 if (sampler_count == 0)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002799 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002800
2801 uint32_t size = sampler_count * 16;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002802 *state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream, size, 32);
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002803
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002804 if (state->map == NULL)
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002805 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2806
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002807 for (uint32_t set = 0; set < layout->num_sets; set++) {
2808 struct anv_descriptor_set_binding *d = &cmd_buffer->descriptors[set];
2809 struct anv_descriptor_set_layout *set_layout = layout->set[set].layout;
2810 struct anv_descriptor_slot *sampler_slots =
2811 set_layout->stage[stage].sampler_start;
2812
2813 uint32_t start = layout->set[set].sampler_start[stage];
2814
2815 for (uint32_t b = 0; b < set_layout->stage[stage].sampler_count; b++) {
2816 struct anv_sampler *sampler =
2817 d->set->descriptors[sampler_slots[b].index].sampler;
2818
2819 if (!sampler)
2820 continue;
2821
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002822 memcpy(state->map + (start + b) * 16,
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002823 sampler->state, sizeof(sampler->state));
2824 }
2825 }
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002826
2827 return VK_SUCCESS;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002828}
2829
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002830static VkResult
2831flush_descriptor_set(struct anv_cmd_buffer *cmd_buffer, uint32_t stage)
2832{
2833 struct anv_state surfaces = { 0, }, samplers = { 0, };
2834 VkResult result;
2835
2836 result = cmd_buffer_emit_samplers(cmd_buffer, stage, &samplers);
2837 if (result != VK_SUCCESS)
2838 return result;
2839 result = cmd_buffer_emit_binding_table(cmd_buffer, stage, &surfaces);
2840 if (result != VK_SUCCESS)
2841 return result;
2842
2843 static const uint32_t sampler_state_opcodes[] = {
2844 [VK_SHADER_STAGE_VERTEX] = 43,
2845 [VK_SHADER_STAGE_TESS_CONTROL] = 44, /* HS */
2846 [VK_SHADER_STAGE_TESS_EVALUATION] = 45, /* DS */
2847 [VK_SHADER_STAGE_GEOMETRY] = 46,
2848 [VK_SHADER_STAGE_FRAGMENT] = 47,
2849 [VK_SHADER_STAGE_COMPUTE] = 0,
2850 };
2851
2852 static const uint32_t binding_table_opcodes[] = {
2853 [VK_SHADER_STAGE_VERTEX] = 38,
2854 [VK_SHADER_STAGE_TESS_CONTROL] = 39,
2855 [VK_SHADER_STAGE_TESS_EVALUATION] = 40,
2856 [VK_SHADER_STAGE_GEOMETRY] = 41,
2857 [VK_SHADER_STAGE_FRAGMENT] = 42,
2858 [VK_SHADER_STAGE_COMPUTE] = 0,
2859 };
2860
2861 if (samplers.alloc_size > 0) {
2862 anv_batch_emit(&cmd_buffer->batch,
2863 GEN8_3DSTATE_SAMPLER_STATE_POINTERS_VS,
2864 ._3DCommandSubOpcode = sampler_state_opcodes[stage],
2865 .PointertoVSSamplerState = samplers.offset);
2866 }
2867
2868 if (surfaces.alloc_size > 0) {
2869 anv_batch_emit(&cmd_buffer->batch,
2870 GEN8_3DSTATE_BINDING_TABLE_POINTERS_VS,
2871 ._3DCommandSubOpcode = binding_table_opcodes[stage],
2872 .PointertoVSBindingTable = surfaces.offset);
2873 }
2874
2875 return VK_SUCCESS;
2876}
2877
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002878static void
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002879flush_descriptor_sets(struct anv_cmd_buffer *cmd_buffer)
2880{
Jason Ekstrand22513052015-05-30 10:07:29 -07002881 uint32_t s, dirty = cmd_buffer->descriptors_dirty &
2882 cmd_buffer->pipeline->active_stages;
2883
Jason Ekstrand5a4ebf62015-07-08 17:29:49 -07002884 VkResult result = VK_SUCCESS;
Jason Ekstrand22513052015-05-30 10:07:29 -07002885 for_each_bit(s, dirty) {
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002886 result = flush_descriptor_set(cmd_buffer, s);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002887 if (result != VK_SUCCESS)
2888 break;
2889 }
2890
2891 if (result != VK_SUCCESS) {
2892 assert(result == VK_ERROR_OUT_OF_DEVICE_MEMORY);
2893
2894 result = anv_cmd_buffer_new_surface_state_bo(cmd_buffer);
2895 assert(result == VK_SUCCESS);
2896
Jason Ekstrand22513052015-05-30 10:07:29 -07002897 /* Re-emit all active binding tables */
2898 for_each_bit(s, cmd_buffer->pipeline->active_stages) {
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002899 result = flush_descriptor_set(cmd_buffer, s);
Jason Ekstrand4ffbab52015-05-29 09:40:03 -07002900
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07002901 /* It had better succeed this time */
2902 assert(result == VK_SUCCESS);
2903 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002904 }
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07002905
Jason Ekstrand22513052015-05-30 10:07:29 -07002906 cmd_buffer->descriptors_dirty &= ~cmd_buffer->pipeline->active_stages;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002907}
2908
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002909static struct anv_state
2910anv_cmd_buffer_emit_dynamic(struct anv_cmd_buffer *cmd_buffer,
2911 uint32_t *a, uint32_t dwords, uint32_t alignment)
2912{
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002913 struct anv_state state;
2914
Jason Ekstrandce002332015-06-05 17:14:41 -07002915 state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
2916 dwords * 4, alignment);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002917 memcpy(state.map, a, dwords * 4);
2918
Jason Ekstrand9cae3d12015-06-09 21:36:12 -07002919 VG(VALGRIND_CHECK_MEM_IS_DEFINED(state.map, dwords * 4));
2920
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002921 return state;
2922}
2923
2924static struct anv_state
2925anv_cmd_buffer_merge_dynamic(struct anv_cmd_buffer *cmd_buffer,
Jason Ekstrandce002332015-06-05 17:14:41 -07002926 uint32_t *a, uint32_t *b,
2927 uint32_t dwords, uint32_t alignment)
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002928{
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002929 struct anv_state state;
2930 uint32_t *p;
2931
Jason Ekstrandce002332015-06-05 17:14:41 -07002932 state = anv_state_stream_alloc(&cmd_buffer->dynamic_state_stream,
2933 dwords * 4, alignment);
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002934 p = state.map;
2935 for (uint32_t i = 0; i < dwords; i++)
2936 p[i] = a[i] | b[i];
2937
Jason Ekstrand9cae3d12015-06-09 21:36:12 -07002938 VG(VALGRIND_CHECK_MEM_IS_DEFINED(p, dwords * 4));
2939
Kristian Høgsbergb29f4422015-05-26 11:22:12 -07002940 return state;
2941}
2942
/* Emit samplers and the binding table for the compute stage, then pack an
 * INTERFACE_DESCRIPTOR_DATA for the bound compute pipeline and load it
 * with MEDIA_INTERFACE_DESCRIPTOR_LOAD.
 *
 * Returns the error from sampler/binding-table emission on failure (the
 * caller grows the surface state BO and retries); VK_SUCCESS otherwise.
 */
static VkResult
flush_compute_descriptor_set(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_device *device = cmd_buffer->device;
   struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
   struct anv_state surfaces = { 0, }, samplers = { 0, };
   VkResult result;

   result = cmd_buffer_emit_samplers(cmd_buffer,
                                     VK_SHADER_STAGE_COMPUTE, &samplers);
   if (result != VK_SUCCESS)
      return result;
   result = cmd_buffer_emit_binding_table(cmd_buffer,
                                          VK_SHADER_STAGE_COMPUTE, &surfaces);
   if (result != VK_SUCCESS)
      return result;

   struct GEN8_INTERFACE_DESCRIPTOR_DATA desc = {
      .KernelStartPointer = pipeline->cs_simd,
      .KernelStartPointerHigh = 0,
      .BindingTablePointer = surfaces.offset,
      .BindingTableEntryCount = 0,
      .SamplerStatePointer = samplers.offset,
      .SamplerCount = 0,
      .NumberofThreadsinGPGPUThreadGroup = 0 /* FIXME: Really? */
   };

   /* The descriptor itself lives in the dynamic state pool; only its
    * offset is handed to the hardware below.
    */
   uint32_t size = GEN8_INTERFACE_DESCRIPTOR_DATA_length * sizeof(uint32_t);
   struct anv_state state =
      anv_state_pool_alloc(&device->dynamic_state_pool, size, 64);

   GEN8_INTERFACE_DESCRIPTOR_DATA_pack(NULL, state.map, &desc);

   anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_INTERFACE_DESCRIPTOR_LOAD,
                  .InterfaceDescriptorTotalLength = size,
                  .InterfaceDescriptorDataStartAddress = state.offset);

   return VK_SUCCESS;
}
2982
2983static void
2984anv_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer)
2985{
2986 struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
2987 VkResult result;
2988
2989 assert(pipeline->active_stages == VK_SHADER_STAGE_COMPUTE_BIT);
2990
2991 if (cmd_buffer->current_pipeline != GPGPU) {
2992 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPELINE_SELECT,
2993 .PipelineSelection = GPGPU);
2994 cmd_buffer->current_pipeline = GPGPU;
2995 }
2996
2997 if (cmd_buffer->compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
2998 anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);
2999
3000 if ((cmd_buffer->descriptors_dirty & VK_SHADER_STAGE_COMPUTE_BIT) ||
3001 (cmd_buffer->compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)) {
3002 result = flush_compute_descriptor_set(cmd_buffer);
3003 if (result != VK_SUCCESS) {
3004 result = anv_cmd_buffer_new_surface_state_bo(cmd_buffer);
3005 assert(result == VK_SUCCESS);
3006 result = flush_compute_descriptor_set(cmd_buffer);
3007 assert(result == VK_SUCCESS);
3008 }
3009 cmd_buffer->descriptors_dirty &= ~VK_SHADER_STAGE_COMPUTE;
3010 }
3011
3012 cmd_buffer->compute_dirty = 0;
3013}
3014
/* Flush all dirty 3D state before a draw: pipeline select, vertex
 * buffers, the pipeline batch, binding tables, viewport/scissor pointers,
 * and the merged rasterizer / depth-stencil / color-calc / VF dynamic
 * state.  Emission order matters here; do not reorder.
 */
static void
anv_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
{
   struct anv_pipeline *pipeline = cmd_buffer->pipeline;
   uint32_t *p;

   /* Only re-emit vertex buffers that are both dirty and actually used by
    * the bound pipeline.
    */
   uint32_t vb_emit = cmd_buffer->vb_dirty & pipeline->vb_used;

   assert((pipeline->active_stages & VK_SHADER_STAGE_COMPUTE_BIT) == 0);

   if (cmd_buffer->current_pipeline != _3D) {
      anv_batch_emit(&cmd_buffer->batch, GEN8_PIPELINE_SELECT,
                     .PipelineSelection = _3D);
      cmd_buffer->current_pipeline = _3D;
   }

   if (vb_emit) {
      /* 3DSTATE_VERTEX_BUFFERS: one header dword plus 4 dwords of
       * VERTEX_BUFFER_STATE per buffer, packed in bit order.
       */
      const uint32_t num_buffers = __builtin_popcount(vb_emit);
      const uint32_t num_dwords = 1 + num_buffers * 4;

      p = anv_batch_emitn(&cmd_buffer->batch, num_dwords,
                          GEN8_3DSTATE_VERTEX_BUFFERS);
      uint32_t vb, i = 0;
      for_each_bit(vb, vb_emit) {
         struct anv_buffer *buffer = cmd_buffer->vertex_bindings[vb].buffer;
         uint32_t offset = cmd_buffer->vertex_bindings[vb].offset;

         struct GEN8_VERTEX_BUFFER_STATE state = {
            .VertexBufferIndex = vb,
            .MemoryObjectControlState = GEN8_MOCS,
            .AddressModifyEnable = true,
            .BufferPitch = pipeline->binding_stride[vb],
            .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
            .BufferSize = buffer->size - offset
         };

         GEN8_VERTEX_BUFFER_STATE_pack(&cmd_buffer->batch, &p[1 + i * 4], &state);
         i++;
      }
   }

   if (cmd_buffer->dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY) {
      /* If somebody compiled a pipeline after starting a command buffer the
       * scratch bo may have grown since we started this cmd buffer (and
       * emitted STATE_BASE_ADDRESS).  If we're binding that pipeline now,
       * reemit STATE_BASE_ADDRESS so that we use the bigger scratch bo. */
      if (cmd_buffer->scratch_size < pipeline->total_scratch)
         anv_cmd_buffer_emit_state_base_address(cmd_buffer);

      anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);
   }

   if (cmd_buffer->descriptors_dirty)
      flush_descriptor_sets(cmd_buffer);

   /* Scissor and viewport state is pre-baked in vp_state; just re-point
    * the hardware at it.
    */
   if (cmd_buffer->dirty & ANV_CMD_BUFFER_VP_DIRTY) {
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_SCISSOR_STATE_POINTERS,
                     .ScissorRectPointer = cmd_buffer->vp_state->scissor.offset);
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_CC,
                     .CCViewportPointer = cmd_buffer->vp_state->cc_vp.offset);
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_SF_CLIP,
                     .SFClipViewportPointer = cmd_buffer->vp_state->sf_clip_vp.offset);
   }

   /* Dynamic rasterizer state is OR-merged with the pipeline's packets. */
   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_RS_DIRTY)) {
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->rs_state->state_sf, pipeline->state_sf);
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->rs_state->state_raster, pipeline->state_raster);
   }

   if (cmd_buffer->ds_state &&
       (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)))
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->ds_state->state_wm_depth_stencil,
                           pipeline->state_wm_depth_stencil);

   /* COLOR_CALC_STATE comes from ds_state, cb_state, or a merge of both,
    * depending on which dynamic state objects are bound.
    */
   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_CB_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)) {
      struct anv_state state;
      if (cmd_buffer->ds_state == NULL)
         state = anv_cmd_buffer_emit_dynamic(cmd_buffer,
                                             cmd_buffer->cb_state->state_color_calc,
                                             GEN8_COLOR_CALC_STATE_length, 64);
      else if (cmd_buffer->cb_state == NULL)
         state = anv_cmd_buffer_emit_dynamic(cmd_buffer,
                                             cmd_buffer->ds_state->state_color_calc,
                                             GEN8_COLOR_CALC_STATE_length, 64);
      else
         state = anv_cmd_buffer_merge_dynamic(cmd_buffer,
                                              cmd_buffer->ds_state->state_color_calc,
                                              cmd_buffer->cb_state->state_color_calc,
                                              GEN8_COLOR_CALC_STATE_length, 64);

      anv_batch_emit(&cmd_buffer->batch,
                     GEN8_3DSTATE_CC_STATE_POINTERS,
                     .ColorCalcStatePointer = state.offset,
                     .ColorCalcStatePointerValid = true);
   }

   if (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY)) {
      anv_batch_emit_merge(&cmd_buffer->batch,
                           cmd_buffer->state_vf, pipeline->state_vf);
   }

   cmd_buffer->vb_dirty &= ~vb_emit;
   cmd_buffer->dirty = 0;
}
3122
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003123void anv_CmdDraw(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003124 VkCmdBuffer cmdBuffer,
3125 uint32_t firstVertex,
3126 uint32_t vertexCount,
3127 uint32_t firstInstance,
3128 uint32_t instanceCount)
3129{
3130 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3131
3132 anv_cmd_buffer_flush_state(cmd_buffer);
3133
3134 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3135 .VertexAccessType = SEQUENTIAL,
3136 .VertexCountPerInstance = vertexCount,
3137 .StartVertexLocation = firstVertex,
3138 .InstanceCount = instanceCount,
3139 .StartInstanceLocation = firstInstance,
3140 .BaseVertexLocation = 0);
3141}
3142
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003143void anv_CmdDrawIndexed(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003144 VkCmdBuffer cmdBuffer,
3145 uint32_t firstIndex,
3146 uint32_t indexCount,
3147 int32_t vertexOffset,
3148 uint32_t firstInstance,
3149 uint32_t instanceCount)
3150{
3151 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3152
3153 anv_cmd_buffer_flush_state(cmd_buffer);
3154
3155 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3156 .VertexAccessType = RANDOM,
3157 .VertexCountPerInstance = indexCount,
3158 .StartVertexLocation = firstIndex,
3159 .InstanceCount = instanceCount,
3160 .StartInstanceLocation = firstInstance,
Kristian Høgsberg Kristensenc8f07852015-06-02 22:35:47 -07003161 .BaseVertexLocation = vertexOffset);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003162}
3163
/* Load a GPU register from memory: emit MI_LOAD_REGISTER_MEM reading the
 * dword at bo+offset into the register at MMIO address `reg`.
 */
static void
anv_batch_lrm(struct anv_batch *batch,
              uint32_t reg, struct anv_bo *bo, uint32_t offset)
{
   anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_MEM,
                  .RegisterAddress = reg,
                  .MemoryAddress = { bo, offset });
}
3172
/* Load a GPU register with an immediate: emit MI_LOAD_REGISTER_IMM
 * writing `imm` into the register at MMIO address `reg`.
 */
static void
anv_batch_lri(struct anv_batch *batch, uint32_t reg, uint32_t imm)
{
   anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_IMM,
                  .RegisterOffset = reg,
                  .DataDWord = imm);
}
3180
/* Auto-Draw / Indirect Registers */
/* MMIO offsets of the registers 3DPRIMITIVE consumes when
 * IndirectParameterEnable is set; loaded below via MI_LOAD_REGISTER_MEM/IMM
 * in the *Indirect draw entry points.
 */
#define GEN7_3DPRIM_END_OFFSET          0x2420
#define GEN7_3DPRIM_START_VERTEX        0x2430
#define GEN7_3DPRIM_VERTEX_COUNT        0x2434
#define GEN7_3DPRIM_INSTANCE_COUNT      0x2438
#define GEN7_3DPRIM_START_INSTANCE      0x243C
#define GEN7_3DPRIM_BASE_VERTEX         0x2440
3188
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003189void anv_CmdDrawIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003190 VkCmdBuffer cmdBuffer,
3191 VkBuffer _buffer,
3192 VkDeviceSize offset,
3193 uint32_t count,
3194 uint32_t stride)
3195{
3196 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3197 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07003198 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003199 uint32_t bo_offset = buffer->offset + offset;
3200
3201 anv_cmd_buffer_flush_state(cmd_buffer);
3202
3203 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
3204 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
3205 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
3206 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 12);
3207 anv_batch_lri(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, 0);
3208
3209 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3210 .IndirectParameterEnable = true,
3211 .VertexAccessType = SEQUENTIAL);
3212}
3213
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003214void anv_CmdDrawIndexedIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003215 VkCmdBuffer cmdBuffer,
3216 VkBuffer _buffer,
3217 VkDeviceSize offset,
3218 uint32_t count,
3219 uint32_t stride)
3220{
3221 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3222 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07003223 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003224 uint32_t bo_offset = buffer->offset + offset;
3225
3226 anv_cmd_buffer_flush_state(cmd_buffer);
3227
3228 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
3229 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
3230 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
3231 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, bo, bo_offset + 12);
3232 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 16);
3233
3234 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
3235 .IndirectParameterEnable = true,
3236 .VertexAccessType = RANDOM);
3237}
3238
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003239void anv_CmdDispatch(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003240 VkCmdBuffer cmdBuffer,
3241 uint32_t x,
3242 uint32_t y,
3243 uint32_t z)
3244{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003245 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003246 struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
3247 struct brw_cs_prog_data *prog_data = &pipeline->cs_prog_data;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003248
3249 anv_cmd_buffer_flush_compute_state(cmd_buffer);
3250
3251 anv_batch_emit(&cmd_buffer->batch, GEN8_GPGPU_WALKER,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003252 .SIMDSize = prog_data->simd_size / 16,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003253 .ThreadDepthCounterMaximum = 0,
3254 .ThreadHeightCounterMaximum = 0,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003255 .ThreadWidthCounterMaximum = pipeline->cs_thread_width_max,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003256 .ThreadGroupIDXDimension = x,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003257 .ThreadGroupIDYDimension = y,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003258 .ThreadGroupIDZDimension = z,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003259 .RightExecutionMask = pipeline->cs_right_mask,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003260 .BottomExecutionMask = 0xffffffff);
3261
3262 anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_STATE_FLUSH);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003263}
3264
/* MMIO offsets of the GPGPU_WALKER indirect dispatch-dimension registers,
 * loaded from the indirect buffer in anv_CmdDispatchIndirect() below.
 */
#define GPGPU_DISPATCHDIMX 0x2500
#define GPGPU_DISPATCHDIMY 0x2504
#define GPGPU_DISPATCHDIMZ 0x2508
3268
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003269void anv_CmdDispatchIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003270 VkCmdBuffer cmdBuffer,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003271 VkBuffer _buffer,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003272 VkDeviceSize offset)
3273{
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003274 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003275 struct anv_pipeline *pipeline = cmd_buffer->compute_pipeline;
3276 struct brw_cs_prog_data *prog_data = &pipeline->cs_prog_data;
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003277 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
3278 struct anv_bo *bo = buffer->bo;
3279 uint32_t bo_offset = buffer->offset + offset;
3280
3281 anv_cmd_buffer_flush_compute_state(cmd_buffer);
3282
3283 anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMX, bo, bo_offset);
3284 anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMY, bo, bo_offset + 4);
3285 anv_batch_lrm(&cmd_buffer->batch, GPGPU_DISPATCHDIMZ, bo, bo_offset + 8);
3286
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003287 anv_batch_emit(&cmd_buffer->batch, GEN8_GPGPU_WALKER,
3288 .IndirectParameterEnable = true,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003289 .SIMDSize = prog_data->simd_size / 16,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003290 .ThreadDepthCounterMaximum = 0,
3291 .ThreadHeightCounterMaximum = 0,
Kristian Høgsberg Kristensenfa8a0772015-06-12 17:21:01 -07003292 .ThreadWidthCounterMaximum = pipeline->cs_thread_width_max,
3293 .RightExecutionMask = pipeline->cs_right_mask,
Kristian Høgsberg Kristensen765175f2015-06-11 15:31:42 -07003294 .BottomExecutionMask = 0xffffffff);
3295
3296 anv_batch_emit(&cmd_buffer->batch, GEN8_MEDIA_STATE_FLUSH);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003297}
3298
/* vkCmdSetEvent: not implemented yet; stub() marks the missing entry
 * point (defined elsewhere in the driver).
 */
void anv_CmdSetEvent(
    VkCmdBuffer                                 cmdBuffer,
    VkEvent                                     event,
    VkPipeEvent                                 pipeEvent)
{
   stub();
}
3306
/* vkCmdResetEvent: not implemented yet; stub() marks the missing entry
 * point (defined elsewhere in the driver).
 */
void anv_CmdResetEvent(
    VkCmdBuffer                                 cmdBuffer,
    VkEvent                                     event,
    VkPipeEvent                                 pipeEvent)
{
   stub();
}
3314
/* vkCmdWaitEvents: not implemented yet; stub() marks the missing entry
 * point (defined elsewhere in the driver).
 */
void anv_CmdWaitEvents(
    VkCmdBuffer                                 cmdBuffer,
    VkWaitEvent                                 waitEvent,
    uint32_t                                    eventCount,
    const VkEvent*                              pEvents,
    VkPipeEventFlags                            pipeEventMask,
    uint32_t                                    memBarrierCount,
    const void* const*                          ppMemBarriers)
{
   stub();
}
3326
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003327void anv_CmdPipelineBarrier(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003328 VkCmdBuffer cmdBuffer,
3329 VkWaitEvent waitEvent,
Chad Versace18ee32e2015-07-07 15:42:38 -07003330 VkPipeEventFlags pipeEventMask,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003331 uint32_t memBarrierCount,
Chad Versace18ee32e2015-07-07 15:42:38 -07003332 const void* const* ppMemBarriers)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003333{
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003334 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *)cmdBuffer;
3335 uint32_t b, *dw;
3336
3337 struct GEN8_PIPE_CONTROL cmd = {
3338 GEN8_PIPE_CONTROL_header,
3339 .PostSyncOperation = NoWrite,
3340 };
3341
3342 /* XXX: I think waitEvent is a no-op on our HW. We should verify that. */
3343
Chad Versace18ee32e2015-07-07 15:42:38 -07003344 if (anv_clear_mask(&pipeEventMask, VK_PIPE_EVENT_TOP_OF_PIPE_BIT)) {
3345 /* This is just what PIPE_CONTROL does */
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003346 }
3347
Chad Versace18ee32e2015-07-07 15:42:38 -07003348 if (anv_clear_mask(&pipeEventMask,
3349 VK_PIPE_EVENT_VERTEX_PROCESSING_COMPLETE_BIT |
3350 VK_PIPE_EVENT_LOCAL_FRAGMENT_PROCESSING_COMPLETE_BIT |
3351 VK_PIPE_EVENT_FRAGMENT_PROCESSING_COMPLETE_BIT)) {
3352 cmd.StallAtPixelScoreboard = true;
3353 }
3354
3355
3356 if (anv_clear_mask(&pipeEventMask,
3357 VK_PIPE_EVENT_GRAPHICS_PIPELINE_COMPLETE_BIT |
3358 VK_PIPE_EVENT_COMPUTE_PIPELINE_COMPLETE_BIT |
3359 VK_PIPE_EVENT_TRANSFER_COMPLETE_BIT |
3360 VK_PIPE_EVENT_COMMANDS_COMPLETE_BIT)) {
3361 cmd.CommandStreamerStallEnable = true;
3362 }
3363
3364 if (anv_clear_mask(&pipeEventMask, VK_PIPE_EVENT_CPU_SIGNAL_BIT)) {
3365 anv_finishme("VK_PIPE_EVENT_CPU_SIGNAL_BIT");
3366 }
3367
3368 /* We checked all known VkPipeEventFlags. */
3369 anv_assert(pipeEventMask == 0);
3370
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003371 /* XXX: Right now, we're really dumb and just flush whatever categories
3372 * the app asks for. One of these days we may make this a bit better
3373 * but right now that's all the hardware allows for in most areas.
3374 */
3375 VkMemoryOutputFlags out_flags = 0;
3376 VkMemoryInputFlags in_flags = 0;
3377
3378 for (uint32_t i = 0; i < memBarrierCount; i++) {
3379 const struct anv_common *common = ppMemBarriers[i];
3380 switch (common->sType) {
3381 case VK_STRUCTURE_TYPE_MEMORY_BARRIER: {
3382 const VkMemoryBarrier *barrier = (VkMemoryBarrier *)common;
3383 out_flags |= barrier->outputMask;
3384 in_flags |= barrier->inputMask;
3385 break;
3386 }
3387 case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER: {
3388 const VkBufferMemoryBarrier *barrier = (VkBufferMemoryBarrier *)common;
3389 out_flags |= barrier->outputMask;
3390 in_flags |= barrier->inputMask;
3391 break;
3392 }
3393 case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER: {
3394 const VkImageMemoryBarrier *barrier = (VkImageMemoryBarrier *)common;
3395 out_flags |= barrier->outputMask;
3396 in_flags |= barrier->inputMask;
3397 break;
3398 }
3399 default:
3400 unreachable("Invalid memory barrier type");
3401 }
3402 }
3403
3404 for_each_bit(b, out_flags) {
3405 switch ((VkMemoryOutputFlags)(1 << b)) {
Jason Ekstrand2b404e52015-07-06 17:18:25 -07003406 case VK_MEMORY_OUTPUT_HOST_WRITE_BIT:
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003407 break; /* FIXME: Little-core systems */
3408 case VK_MEMORY_OUTPUT_SHADER_WRITE_BIT:
3409 cmd.DCFlushEnable = true;
3410 break;
3411 case VK_MEMORY_OUTPUT_COLOR_ATTACHMENT_BIT:
3412 cmd.RenderTargetCacheFlushEnable = true;
3413 break;
3414 case VK_MEMORY_OUTPUT_DEPTH_STENCIL_ATTACHMENT_BIT:
3415 cmd.DepthCacheFlushEnable = true;
3416 break;
3417 case VK_MEMORY_OUTPUT_TRANSFER_BIT:
3418 cmd.RenderTargetCacheFlushEnable = true;
3419 cmd.DepthCacheFlushEnable = true;
3420 break;
3421 default:
3422 unreachable("Invalid memory output flag");
3423 }
3424 }
3425
3426 for_each_bit(b, out_flags) {
3427 switch ((VkMemoryInputFlags)(1 << b)) {
Jason Ekstrand2b404e52015-07-06 17:18:25 -07003428 case VK_MEMORY_INPUT_HOST_READ_BIT:
Jason Ekstrand29d2bbb2015-06-10 16:37:31 -07003429 break; /* FIXME: Little-core systems */
3430 case VK_MEMORY_INPUT_INDIRECT_COMMAND_BIT:
3431 case VK_MEMORY_INPUT_INDEX_FETCH_BIT:
3432 case VK_MEMORY_INPUT_VERTEX_ATTRIBUTE_FETCH_BIT:
3433 cmd.VFCacheInvalidationEnable = true;
3434 break;
3435 case VK_MEMORY_INPUT_UNIFORM_READ_BIT:
3436 cmd.ConstantCacheInvalidationEnable = true;
3437 /* fallthrough */
3438 case VK_MEMORY_INPUT_SHADER_READ_BIT:
3439 cmd.DCFlushEnable = true;
3440 cmd.TextureCacheInvalidationEnable = true;
3441 break;
3442 case VK_MEMORY_INPUT_COLOR_ATTACHMENT_BIT:
3443 case VK_MEMORY_INPUT_DEPTH_STENCIL_ATTACHMENT_BIT:
3444 break; /* XXX: Hunh? */
3445 case VK_MEMORY_INPUT_TRANSFER_BIT:
3446 cmd.TextureCacheInvalidationEnable = true;
3447 break;
3448 }
3449 }
3450
3451 dw = anv_batch_emit_dwords(&cmd_buffer->batch, GEN8_PIPE_CONTROL_length);
3452 GEN8_PIPE_CONTROL_pack(&cmd_buffer->batch, dw, &cmd);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003453}
3454
Jason Ekstrand57153da2015-05-22 15:15:08 -07003455static void
3456anv_framebuffer_destroy(struct anv_device *device,
3457 struct anv_object *object,
3458 VkObjectType obj_type)
3459{
3460 struct anv_framebuffer *fb = (struct anv_framebuffer *)object;
3461
3462 assert(obj_type == VK_OBJECT_TYPE_FRAMEBUFFER);
3463
3464 anv_DestroyObject((VkDevice) device,
3465 VK_OBJECT_TYPE_DYNAMIC_VP_STATE,
3466 fb->vp_state);
3467
3468 anv_device_free(device, fb);
3469}
3470
/* vkCreateFramebuffer: allocate the framebuffer object, record the color
 * and depth/stencil attachment views (falling back to a null depth view
 * when none is given), and implicitly create a matching full-surface
 * viewport/scissor state object (destroyed in anv_framebuffer_destroy).
 *
 * Returns VK_ERROR_OUT_OF_HOST_MEMORY on allocation failure.
 */
VkResult anv_CreateFramebuffer(
    VkDevice                                    _device,
    const VkFramebufferCreateInfo*              pCreateInfo,
    VkFramebuffer*                              pFramebuffer)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_framebuffer *framebuffer;

   /* Placeholder depth/stencil view with zero strides, used when the app
    * provides no depth/stencil attachment.
    */
   static const struct anv_depth_stencil_view null_view =
      { .depth_format = D16_UNORM, .depth_stride = 0, .stencil_stride = 0 };

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);

   framebuffer = anv_device_alloc(device, sizeof(*framebuffer), 8,
                                  VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (framebuffer == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   framebuffer->base.destructor = anv_framebuffer_destroy;

   framebuffer->color_attachment_count = pCreateInfo->colorAttachmentCount;
   for (uint32_t i = 0; i < pCreateInfo->colorAttachmentCount; i++) {
      framebuffer->color_attachments[i] =
         (struct anv_surface_view *) pCreateInfo->pColorAttachments[i].view;
   }

   if (pCreateInfo->pDepthStencilAttachment) {
      framebuffer->depth_stencil =
         (struct anv_depth_stencil_view *) pCreateInfo->pDepthStencilAttachment->view;
   } else {
      framebuffer->depth_stencil = &null_view;
   }

   framebuffer->sample_count = pCreateInfo->sampleCount;
   framebuffer->width = pCreateInfo->width;
   framebuffer->height = pCreateInfo->height;
   framebuffer->layers = pCreateInfo->layers;

   /* One viewport/scissor covering the whole framebuffer, depth 0..1. */
   anv_CreateDynamicViewportState((VkDevice) device,
      &(VkDynamicVpStateCreateInfo) {
         .sType = VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO,
         .viewportAndScissorCount = 1,
         .pViewports = (VkViewport[]) {
            {
               .originX = 0,
               .originY = 0,
               .width = pCreateInfo->width,
               .height = pCreateInfo->height,
               .minDepth = 0,
               .maxDepth = 1
            },
         },
         .pScissors = (VkRect2D[]) {
            { { 0, 0 },
              { pCreateInfo->width, pCreateInfo->height } },
         }
      },
      &framebuffer->vp_state);

   *pFramebuffer = (VkFramebuffer) framebuffer;

   return VK_SUCCESS;
}
3534
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003535VkResult anv_CreateRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003536 VkDevice _device,
3537 const VkRenderPassCreateInfo* pCreateInfo,
3538 VkRenderPass* pRenderPass)
3539{
3540 struct anv_device *device = (struct anv_device *) _device;
3541 struct anv_render_pass *pass;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003542 size_t size;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003543
3544 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);
3545
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003546 size = sizeof(*pass) +
3547 pCreateInfo->layers * sizeof(struct anv_render_pass_layer);
3548 pass = anv_device_alloc(device, size, 8,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003549 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
3550 if (pass == NULL)
3551 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
3552
3553 pass->render_area = pCreateInfo->renderArea;
3554
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003555 pass->num_layers = pCreateInfo->layers;
3556
3557 pass->num_clear_layers = 0;
3558 for (uint32_t i = 0; i < pCreateInfo->layers; i++) {
3559 pass->layers[i].color_load_op = pCreateInfo->pColorLoadOps[i];
3560 pass->layers[i].clear_color = pCreateInfo->pColorLoadClearValues[i];
3561 if (pass->layers[i].color_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR)
3562 pass->num_clear_layers++;
3563 }
3564
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003565 *pRenderPass = (VkRenderPass) pass;
3566
3567 return VK_SUCCESS;
3568}
3569
Jason Ekstrand0ff06542015-07-07 17:11:35 -07003570VkResult anv_GetRenderAreaGranularity(
3571 VkDevice device,
3572 VkRenderPass renderPass,
3573 VkExtent2D* pGranularity)
3574{
3575 *pGranularity = (VkExtent2D) { 1, 1 };
3576
3577 return VK_SUCCESS;
3578}
3579
/* Emit the depth/stencil buffer state for the current framebuffer:
 * 3DSTATE_DEPTH_BUFFER and 3DSTATE_STENCIL_BUFFER from the framebuffer's
 * depth_stencil view (whose strides are 0 when absent — see the null view
 * in anv_CreateFramebuffer), plus empty HIER_DEPTH and CLEAR_PARAMS
 * packets to disable hierarchical depth and reset clear params.
 */
static void
anv_cmd_buffer_emit_depth_stencil(struct anv_cmd_buffer *cmd_buffer,
                                  struct anv_render_pass *pass)
{
   const struct anv_depth_stencil_view *view =
      cmd_buffer->framebuffer->depth_stencil;

   /* FIXME: Implement the PMA stall W/A */
   /* FIXME: Width and Height are wrong */

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DEPTH_BUFFER,
                  .SurfaceType = SURFTYPE_2D,
                  .DepthWriteEnable = view->depth_stride > 0,
                  .StencilWriteEnable = view->stencil_stride > 0,
                  .HierarchicalDepthBufferEnable = false,
                  .SurfaceFormat = view->depth_format,
                  .SurfacePitch = view->depth_stride > 0 ? view->depth_stride - 1 : 0,
                  .SurfaceBaseAddress = { view->bo, view->depth_offset },
                  .Height = pass->render_area.extent.height - 1,
                  .Width = pass->render_area.extent.width - 1,
                  .LOD = 0,
                  .Depth = 1 - 1,
                  .MinimumArrayElement = 0,
                  .DepthBufferObjectControlState = GEN8_MOCS,
                  .RenderTargetViewExtent = 1 - 1,
                  .SurfaceQPitch = view->depth_qpitch >> 2);

   /* Disable hierarchial depth buffers. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_HIER_DEPTH_BUFFER);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_STENCIL_BUFFER,
                  .StencilBufferEnable = view->stencil_stride > 0,
                  .StencilBufferObjectControlState = GEN8_MOCS,
                  .SurfacePitch = view->stencil_stride > 0 ? view->stencil_stride - 1 : 0,
                  .SurfaceBaseAddress = { view->bo, view->stencil_offset },
                  .SurfaceQPitch = view->stencil_qpitch >> 2);

   /* Clear the clear params. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_CLEAR_PARAMS);
}
3620
/* vkCmdPushConstants — not yet implemented; stub() marks the
 * unimplemented entry point.  Push-constant updates recorded through this
 * command are currently dropped.
 */
void anv_CmdPushConstants(
    VkCmdBuffer                                 cmdBuffer,
    VkPipelineLayout                            layout,
    VkShaderStageFlags                          stageFlags,
    uint32_t                                    start,
    uint32_t                                    length,
    const void*                                 values)
{
   stub();
}
3631
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003632void anv_CmdBeginRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003633 VkCmdBuffer cmdBuffer,
3634 const VkRenderPassBegin* pRenderPassBegin)
3635{
3636 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3637 struct anv_render_pass *pass = (struct anv_render_pass *) pRenderPassBegin->renderPass;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003638 struct anv_framebuffer *framebuffer =
3639 (struct anv_framebuffer *) pRenderPassBegin->framebuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003640
Jason Ekstrand52940e82015-07-08 10:57:13 -07003641 assert(pRenderPassBegin->contents == VK_RENDER_PASS_CONTENTS_INLINE);
3642
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003643 cmd_buffer->framebuffer = framebuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003644
Jason Ekstrand22513052015-05-30 10:07:29 -07003645 cmd_buffer->descriptors_dirty |= VK_SHADER_STAGE_FRAGMENT_BIT;
Jason Ekstrandc4bd5f82015-05-29 15:16:58 -07003646
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003647 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DRAWING_RECTANGLE,
3648 .ClippedDrawingRectangleYMin = pass->render_area.offset.y,
3649 .ClippedDrawingRectangleXMin = pass->render_area.offset.x,
3650 .ClippedDrawingRectangleYMax =
3651 pass->render_area.offset.y + pass->render_area.extent.height - 1,
3652 .ClippedDrawingRectangleXMax =
3653 pass->render_area.offset.x + pass->render_area.extent.width - 1,
3654 .DrawingRectangleOriginY = 0,
3655 .DrawingRectangleOriginX = 0);
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003656
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003657 anv_cmd_buffer_emit_depth_stencil(cmd_buffer, pass);
3658
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003659 anv_cmd_buffer_clear(cmd_buffer, pass);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003660}
3661
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003662void anv_CmdEndRenderPass(
Jason Ekstranda35fef12015-07-07 16:22:23 -07003663 VkCmdBuffer cmdBuffer)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003664{
Jason Ekstranda1309c52015-05-13 22:13:05 -07003665 /* Emit a flushing pipe control at the end of a pass. This is kind of a
3666 * hack but it ensures that render targets always actually get written.
3667 * Eventually, we should do flushing based on image format transitions
3668 * or something of that nature.
3669 */
3670 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *)cmdBuffer;
3671 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
3672 .PostSyncOperation = NoWrite,
3673 .RenderTargetCacheFlushEnable = true,
3674 .InstructionCacheInvalidateEnable = true,
3675 .DepthCacheFlushEnable = true,
3676 .VFCacheInvalidationEnable = true,
3677 .TextureCacheInvalidationEnable = true,
3678 .CommandStreamerStallEnable = true);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003679}
Kristian Høgsbergf8866472015-05-15 22:04:15 -07003680
/* vkCmdExecuteCommands — not yet implemented; stub() marks the
 * unimplemented entry point.  Secondary command buffers cannot be executed
 * until this is filled in.
 */
void anv_CmdExecuteCommands(
    VkCmdBuffer                                 cmdBuffer,
    uint32_t                                    cmdBuffersCount,
    const VkCmdBuffer*                          pCmdBuffers)
{
   stub();
}
3688
/* Prototypes for the debug-marker/tag entry points defined below.  These
 * are declared here with default ELF visibility so they are exported from
 * the driver shared object — presumably because they are resolved by name
 * rather than through the normal dispatch mechanism (NOTE(review): confirm
 * against the loader's expectations).
 */
void vkCmdDbgMarkerBegin(
    VkCmdBuffer                                 cmdBuffer,
    const char*                                 pMarker)
   __attribute__ ((visibility ("default")));

void vkCmdDbgMarkerEnd(
    VkCmdBuffer                                 cmdBuffer)
   __attribute__ ((visibility ("default")));

VkResult vkDbgSetObjectTag(
    VkDevice                                    device,
    VkObject                                    object,
    size_t                                      tagSize,
    const void*                                 pTag)
   __attribute__ ((visibility ("default")));
3704
3705
/* Debug-marker begin: intentionally a no-op — markers are accepted but
 * not recorded. */
void vkCmdDbgMarkerBegin(
    VkCmdBuffer                                 cmdBuffer,
    const char*                                 pMarker)
{
}
3711
/* Debug-marker end: intentionally a no-op, matching vkCmdDbgMarkerBegin. */
void vkCmdDbgMarkerEnd(
    VkCmdBuffer                                 cmdBuffer)
{
}
3716
/* Object tagging: the tag data is accepted and discarded; always reports
 * success so debug tooling keeps working against this driver. */
VkResult vkDbgSetObjectTag(
    VkDevice                                    device,
    VkObject                                    object,
    size_t                                      tagSize,
    const void*                                 pTag)
{
   return VK_SUCCESS;
}