blob: de68fa551a5f79a05ceb518765c65ff9b475cc06 [file] [log] [blame]
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001/*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include "private.h"
31
static int
anv_env_get_int(const char *name)
{
   /* Read an integer from the environment; an unset variable reads as 0.
    * strtol with base 0 accepts decimal, hex ("0x...") and octal forms. */
   const char *val = getenv(name);

   return val != NULL ? strtol(val, NULL, 0) : 0;
}
42
43static VkResult
44fill_physical_device(struct anv_physical_device *device,
45 struct anv_instance *instance,
46 const char *path)
47{
48 int fd;
49
50 fd = open("/dev/dri/renderD128", O_RDWR | O_CLOEXEC);
51 if (fd < 0)
52 return vk_error(VK_ERROR_UNAVAILABLE);
53
54 device->instance = instance;
55 device->path = path;
56
57 device->chipset_id = anv_env_get_int("INTEL_DEVID_OVERRIDE");
58 device->no_hw = false;
59 if (device->chipset_id) {
60 /* INTEL_DEVID_OVERRIDE implies INTEL_NO_HW. */
61 device->no_hw = true;
62 } else {
63 device->chipset_id = anv_gem_get_param(fd, I915_PARAM_CHIPSET_ID);
64 }
65 if (!device->chipset_id)
66 goto fail;
67
68 device->name = brw_get_device_name(device->chipset_id);
69 device->info = brw_get_device_info(device->chipset_id, -1);
70 if (!device->info)
71 goto fail;
72
73 if (!anv_gem_get_param(fd, I915_PARAM_HAS_WAIT_TIMEOUT))
74 goto fail;
75
76 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXECBUF2))
77 goto fail;
78
79 if (!anv_gem_get_param(fd, I915_PARAM_HAS_LLC))
80 goto fail;
81
82 if (!anv_gem_get_param(fd, I915_PARAM_HAS_EXEC_CONSTANTS))
83 goto fail;
84
85 close(fd);
86
87 return VK_SUCCESS;
88
89 fail:
90 close(fd);
91
92 return vk_error(VK_ERROR_UNAVAILABLE);
93}
94
95static void *default_alloc(
96 void* pUserData,
97 size_t size,
98 size_t alignment,
99 VkSystemAllocType allocType)
100{
101 return malloc(size);
102}
103
static void default_free(
    void* pUserData,
    void* pMem)
{
   /* Default free callback: the user-data pointer is unused and
    * free(NULL) is a no-op, so no guards are needed. */
   (void) pUserData;
   free(pMem);
}
110
/* Allocator used when the application passes no pAllocCb at instance
 * creation time. */
static const VkAllocCallbacks default_alloc_callbacks = {
   .pUserData = NULL,
   .pfnAlloc = default_alloc,
   .pfnFree = default_free
};
116
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700117VkResult anv_CreateInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700118 const VkInstanceCreateInfo* pCreateInfo,
119 VkInstance* pInstance)
120{
121 struct anv_instance *instance;
122 const VkAllocCallbacks *alloc_callbacks = &default_alloc_callbacks;
123 void *user_data = NULL;
124 VkResult result;
125
126 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
127
128 if (pCreateInfo->pAllocCb) {
129 alloc_callbacks = pCreateInfo->pAllocCb;
130 user_data = pCreateInfo->pAllocCb->pUserData;
131 }
132 instance = alloc_callbacks->pfnAlloc(user_data, sizeof(*instance), 8,
133 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
134 if (!instance)
135 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
136
137 instance->pAllocUserData = alloc_callbacks->pUserData;
138 instance->pfnAlloc = alloc_callbacks->pfnAlloc;
139 instance->pfnFree = alloc_callbacks->pfnFree;
140 instance->apiVersion = pCreateInfo->pAppInfo->apiVersion;
141
142 instance->physicalDeviceCount = 0;
143 result = fill_physical_device(&instance->physicalDevice,
144 instance, "/dev/dri/renderD128");
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700145
Chad Versacea61f3072015-05-20 19:51:10 -0700146 if (result != VK_SUCCESS)
147 return result;
148
149 instance->physicalDeviceCount++;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700150 *pInstance = (VkInstance) instance;
151
152 return VK_SUCCESS;
153}
154
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700155VkResult anv_DestroyInstance(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700156 VkInstance _instance)
157{
158 struct anv_instance *instance = (struct anv_instance *) _instance;
159
160 instance->pfnFree(instance->pAllocUserData, instance);
161
162 return VK_SUCCESS;
163}
164
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700165VkResult anv_EnumeratePhysicalDevices(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700166 VkInstance _instance,
167 uint32_t* pPhysicalDeviceCount,
168 VkPhysicalDevice* pPhysicalDevices)
169{
170 struct anv_instance *instance = (struct anv_instance *) _instance;
171
172 if (*pPhysicalDeviceCount >= 1)
173 pPhysicalDevices[0] = (VkPhysicalDevice) &instance->physicalDevice;
174 *pPhysicalDeviceCount = instance->physicalDeviceCount;
175
176 return VK_SUCCESS;
177}
178
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700179VkResult anv_GetPhysicalDeviceInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700180 VkPhysicalDevice physicalDevice,
181 VkPhysicalDeviceInfoType infoType,
182 size_t* pDataSize,
183 void* pData)
184{
185 struct anv_physical_device *device = (struct anv_physical_device *) physicalDevice;
186 VkPhysicalDeviceProperties *properties;
187 VkPhysicalDevicePerformance *performance;
188 VkPhysicalDeviceQueueProperties *queue_properties;
189 VkPhysicalDeviceMemoryProperties *memory_properties;
Kristian Høgsberga29df712015-05-15 22:04:52 -0700190 VkDisplayPropertiesWSI *display_properties;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700191 uint64_t ns_per_tick = 80;
192
Kristian Høgsberga29df712015-05-15 22:04:52 -0700193 switch ((uint32_t) infoType) {
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700194 case VK_PHYSICAL_DEVICE_INFO_TYPE_PROPERTIES:
195 properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700196
197 *pDataSize = sizeof(*properties);
198 if (pData == NULL)
199 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700200
201 properties->apiVersion = 1;
202 properties->driverVersion = 1;
203 properties->vendorId = 0x8086;
204 properties->deviceId = device->chipset_id;
205 properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
206 strcpy(properties->deviceName, device->name);
207 properties->maxInlineMemoryUpdateSize = 0;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700208 properties->maxBoundDescriptorSets = MAX_SETS;
209 properties->maxThreadGroupSize = 512;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700210 properties->timestampFrequency = 1000 * 1000 * 1000 / ns_per_tick;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700211 properties->multiColorAttachmentClears = true;
212 properties->maxDescriptorSets = 8;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700213 properties->maxViewports = 16;
214 properties->maxColorAttachments = 8;
215 return VK_SUCCESS;
216
217 case VK_PHYSICAL_DEVICE_INFO_TYPE_PERFORMANCE:
218 performance = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700219
220 *pDataSize = sizeof(*performance);
221 if (pData == NULL)
222 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700223
224 performance->maxDeviceClock = 1.0;
225 performance->aluPerClock = 1.0;
226 performance->texPerClock = 1.0;
227 performance->primsPerClock = 1.0;
228 performance->pixelsPerClock = 1.0;
229 return VK_SUCCESS;
230
231 case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PROPERTIES:
232 queue_properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700233
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700234 *pDataSize = sizeof(*queue_properties);
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700235 if (pData == NULL)
236 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700237
238 queue_properties->queueFlags = 0;
239 queue_properties->queueCount = 1;
240 queue_properties->maxAtomicCounters = 0;
Kristian Høgsberg5286ef72015-05-18 10:17:53 -0700241 queue_properties->supportsTimestamps = true;
242 queue_properties->maxMemReferences = 256;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700243 return VK_SUCCESS;
244
245 case VK_PHYSICAL_DEVICE_INFO_TYPE_MEMORY_PROPERTIES:
246 memory_properties = pData;
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700247
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700248 *pDataSize = sizeof(*memory_properties);
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700249 if (pData == NULL)
250 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700251
252 memory_properties->supportsMigration = false;
253 memory_properties->supportsPinning = false;
254 return VK_SUCCESS;
255
Kristian Høgsberga29df712015-05-15 22:04:52 -0700256 case VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI:
257 anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_DISPLAY_PROPERTIES_WSI");
258
259 *pDataSize = sizeof(*display_properties);
260 if (pData == NULL)
261 return VK_SUCCESS;
262
263 display_properties = pData;
264 display_properties->display = 0;
265 display_properties->physicalResolution = (VkExtent2D) { 0, 0 };
266 return VK_SUCCESS;
267
268 case VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI:
269 anv_finishme("VK_PHYSICAL_DEVICE_INFO_TYPE_QUEUE_PRESENT_PROPERTIES_WSI");
270 return VK_SUCCESS;
271
272
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700273 default:
274 return VK_UNSUPPORTED;
275 }
276
277}
278
279void * vkGetProcAddr(
280 VkPhysicalDevice physicalDevice,
281 const char* pName)
282{
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700283 return anv_lookup_entrypoint(pName);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700284}
285
286static void
287parse_debug_flags(struct anv_device *device)
288{
289 const char *debug, *p, *end;
290
291 debug = getenv("INTEL_DEBUG");
292 device->dump_aub = false;
293 if (debug) {
294 for (p = debug; *p; p = end + 1) {
295 end = strchrnul(p, ',');
296 if (end - p == 3 && memcmp(p, "aub", 3) == 0)
297 device->dump_aub = true;
298 if (end - p == 5 && memcmp(p, "no_hw", 5) == 0)
299 device->no_hw = true;
300 if (*end == '\0')
301 break;
302 }
303 }
304}
305
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700306VkResult anv_CreateDevice(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700307 VkPhysicalDevice _physicalDevice,
308 const VkDeviceCreateInfo* pCreateInfo,
309 VkDevice* pDevice)
310{
311 struct anv_physical_device *physicalDevice =
312 (struct anv_physical_device *) _physicalDevice;
313 struct anv_instance *instance = physicalDevice->instance;
314 struct anv_device *device;
315
316 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
317
318 device = instance->pfnAlloc(instance->pAllocUserData,
319 sizeof(*device), 8,
320 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
321 if (!device)
322 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
323
324 device->no_hw = physicalDevice->no_hw;
325 parse_debug_flags(device);
326
327 device->instance = physicalDevice->instance;
328 device->fd = open("/dev/dri/renderD128", O_RDWR | O_CLOEXEC);
329 if (device->fd == -1)
330 goto fail_device;
331
332 device->context_id = anv_gem_create_context(device);
333 if (device->context_id == -1)
334 goto fail_fd;
335
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700336 anv_block_pool_init(&device->dynamic_state_block_pool, device, 2048);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700337
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700338 anv_state_pool_init(&device->dynamic_state_pool,
339 &device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700340
341 anv_block_pool_init(&device->instruction_block_pool, device, 2048);
342 anv_block_pool_init(&device->surface_state_block_pool, device, 2048);
343
Jason Ekstrand923691c2015-05-18 19:56:32 -0700344
345 /* Binding table pointers are only 16 bits so we have to make sure that
346 * they get allocated at the beginning of the surface state BO. To
347 * handle this, we create a separate block pool that works out of the
348 * first 64 KB of the surface state BO.
349 */
350 anv_block_pool_init_slave(&device->binding_table_block_pool,
351 &device->surface_state_block_pool, 32);
352
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700353 anv_state_pool_init(&device->surface_state_pool,
354 &device->surface_state_block_pool);
355
356 device->compiler = anv_compiler_create(device->fd);
357 device->aub_writer = NULL;
358
359 device->info = *physicalDevice->info;
360
361 pthread_mutex_init(&device->mutex, NULL);
362
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -0700363 anv_device_init_meta(device);
364
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700365 *pDevice = (VkDevice) device;
366
367 return VK_SUCCESS;
368
369 fail_fd:
370 close(device->fd);
371 fail_device:
372 anv_device_free(device, device);
373
374 return vk_error(VK_ERROR_UNAVAILABLE);
375}
376
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700377VkResult anv_DestroyDevice(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700378 VkDevice _device)
379{
380 struct anv_device *device = (struct anv_device *) _device;
381
382 anv_compiler_destroy(device->compiler);
383
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700384 anv_block_pool_finish(&device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700385 anv_block_pool_finish(&device->instruction_block_pool);
386 anv_block_pool_finish(&device->surface_state_block_pool);
387
388 close(device->fd);
389
390 if (device->aub_writer)
391 anv_aub_writer_destroy(device->aub_writer);
392
393 anv_device_free(device, device);
394
395 return VK_SUCCESS;
396}
397
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700398VkResult anv_GetGlobalExtensionInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700399 VkExtensionInfoType infoType,
400 uint32_t extensionIndex,
401 size_t* pDataSize,
402 void* pData)
403{
Kristian Høgsberga29df712015-05-15 22:04:52 -0700404 static const VkExtensionProperties extensions[] = {
405 {
406 .extName = "VK_WSI_LunarG",
407 .version = 3
408 }
409 };
410 uint32_t count = ARRAY_SIZE(extensions);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700411
412 switch (infoType) {
413 case VK_EXTENSION_INFO_TYPE_COUNT:
Kristian Høgsberga29df712015-05-15 22:04:52 -0700414 memcpy(pData, &count, sizeof(count));
415 *pDataSize = sizeof(count);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700416 return VK_SUCCESS;
Kristian Høgsberga29df712015-05-15 22:04:52 -0700417
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700418 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
Kristian Høgsberga29df712015-05-15 22:04:52 -0700419 if (extensionIndex >= count)
420 return vk_error(VK_ERROR_INVALID_EXTENSION);
421
422 memcpy(pData, &extensions[extensionIndex], sizeof(extensions[0]));
423 *pDataSize = sizeof(extensions[0]);
424 return VK_SUCCESS;
425
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700426 default:
427 return VK_UNSUPPORTED;
428 }
429}
430
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700431VkResult anv_GetPhysicalDeviceExtensionInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700432 VkPhysicalDevice physicalDevice,
433 VkExtensionInfoType infoType,
434 uint32_t extensionIndex,
435 size_t* pDataSize,
436 void* pData)
437{
438 uint32_t *count;
439
440 switch (infoType) {
441 case VK_EXTENSION_INFO_TYPE_COUNT:
Kristian Høgsberg783e6212015-05-17 19:22:52 -0700442 *pDataSize = 4;
443 if (pData == NULL)
444 return VK_SUCCESS;
445
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700446 count = pData;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700447 *count = 0;
448 return VK_SUCCESS;
449
450 case VK_EXTENSION_INFO_TYPE_PROPERTIES:
451 return vk_error(VK_ERROR_INVALID_EXTENSION);
452
453 default:
454 return VK_UNSUPPORTED;
455 }
456}
457
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700458VkResult anv_EnumerateLayers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700459 VkPhysicalDevice physicalDevice,
460 size_t maxStringSize,
461 size_t* pLayerCount,
462 char* const* pOutLayers,
463 void* pReserved)
464{
465 *pLayerCount = 0;
466
467 return VK_SUCCESS;
468}
469
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700470VkResult anv_GetDeviceQueue(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700471 VkDevice _device,
472 uint32_t queueNodeIndex,
473 uint32_t queueIndex,
474 VkQueue* pQueue)
475{
476 struct anv_device *device = (struct anv_device *) _device;
477 struct anv_queue *queue;
478
479 /* FIXME: Should allocate these at device create time. */
480
481 queue = anv_device_alloc(device, sizeof(*queue), 8,
482 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
483 if (queue == NULL)
484 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
485
486 queue->device = device;
487 queue->pool = &device->surface_state_pool;
488
489 queue->completed_serial = anv_state_pool_alloc(queue->pool, 4, 4);
490 *(uint32_t *)queue->completed_serial.map = 0;
491 queue->next_serial = 1;
492
493 *pQueue = (VkQueue) queue;
494
495 return VK_SUCCESS;
496}
497
498static const uint32_t BATCH_SIZE = 8192;
499
500VkResult
501anv_batch_init(struct anv_batch *batch, struct anv_device *device)
502{
503 VkResult result;
504
505 result = anv_bo_init_new(&batch->bo, device, BATCH_SIZE);
506 if (result != VK_SUCCESS)
507 return result;
508
509 batch->bo.map =
510 anv_gem_mmap(device, batch->bo.gem_handle, 0, BATCH_SIZE);
511 if (batch->bo.map == NULL) {
Kristian Høgsberga1bd4262015-05-19 14:14:24 -0700512 result = vk_error(VK_ERROR_MEMORY_MAP_FAILED);
513 goto fail_bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700514 }
515
516 batch->cmd_relocs.num_relocs = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700517 batch->next = batch->bo.map;
518
519 return VK_SUCCESS;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -0700520
521 fail_bo:
522 anv_gem_close(device, batch->bo.gem_handle);
523
524 return result;
525
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700526}
527
528void
529anv_batch_finish(struct anv_batch *batch, struct anv_device *device)
530{
531 anv_gem_munmap(batch->bo.map, BATCH_SIZE);
532 anv_gem_close(device, batch->bo.gem_handle);
533}
534
535void
536anv_batch_reset(struct anv_batch *batch)
537{
538 batch->next = batch->bo.map;
539 batch->cmd_relocs.num_relocs = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700540}
541
542void *
543anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords)
544{
545 void *p = batch->next;
546
547 batch->next += num_dwords * 4;
548
549 return p;
550}
551
552static void
553anv_reloc_list_append(struct anv_reloc_list *list,
554 struct anv_reloc_list *other, uint32_t offset)
555{
556 uint32_t i, count;
557
558 count = list->num_relocs;
559 memcpy(&list->relocs[count], &other->relocs[0],
560 other->num_relocs * sizeof(other->relocs[0]));
561 memcpy(&list->reloc_bos[count], &other->reloc_bos[0],
562 other->num_relocs * sizeof(other->reloc_bos[0]));
563 for (i = 0; i < other->num_relocs; i++)
564 list->relocs[i + count].offset += offset;
565
566 count += other->num_relocs;
567}
568
569static uint64_t
570anv_reloc_list_add(struct anv_reloc_list *list,
571 uint32_t offset,
572 struct anv_bo *target_bo, uint32_t delta)
573{
574 struct drm_i915_gem_relocation_entry *entry;
575 int index;
576
577 assert(list->num_relocs < ANV_BATCH_MAX_RELOCS);
578
579 /* XXX: Can we use I915_EXEC_HANDLE_LUT? */
580 index = list->num_relocs++;
581 list->reloc_bos[index] = target_bo;
582 entry = &list->relocs[index];
583 entry->target_handle = target_bo->gem_handle;
584 entry->delta = delta;
585 entry->offset = offset;
586 entry->presumed_offset = target_bo->offset;
587 entry->read_domains = 0;
588 entry->write_domain = 0;
589
590 return target_bo->offset + delta;
591}
592
593void
594anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other)
595{
596 uint32_t size, offset;
597
598 size = other->next - other->bo.map;
599 memcpy(batch->next, other->bo.map, size);
600
601 offset = batch->next - batch->bo.map;
602 anv_reloc_list_append(&batch->cmd_relocs, &other->cmd_relocs, offset);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700603
604 batch->next += size;
605}
606
607uint64_t
608anv_batch_emit_reloc(struct anv_batch *batch,
609 void *location, struct anv_bo *bo, uint32_t delta)
610{
611 return anv_reloc_list_add(&batch->cmd_relocs,
612 location - batch->bo.map, bo, delta);
613}
614
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700615VkResult anv_QueueSubmit(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700616 VkQueue _queue,
617 uint32_t cmdBufferCount,
618 const VkCmdBuffer* pCmdBuffers,
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700619 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700620{
621 struct anv_queue *queue = (struct anv_queue *) _queue;
622 struct anv_device *device = queue->device;
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700623 struct anv_fence *fence = (struct anv_fence *) _fence;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700624 int ret;
625
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700626 for (uint32_t i = 0; i < cmdBufferCount; i++) {
627 struct anv_cmd_buffer *cmd_buffer =
628 (struct anv_cmd_buffer *) pCmdBuffers[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700629
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700630 if (device->dump_aub)
631 anv_cmd_buffer_dump(cmd_buffer);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700632
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700633 if (!device->no_hw) {
634 ret = anv_gem_execbuffer(device, &cmd_buffer->execbuf);
635 if (ret != 0)
Kristian Høgsberg2b7a0602015-05-12 14:38:58 -0700636 return vk_error(VK_ERROR_UNKNOWN);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700637
Kristian Høgsberg6afb2642015-05-18 08:49:15 -0700638 if (fence) {
639 ret = anv_gem_execbuffer(device, &fence->execbuf);
640 if (ret != 0)
641 return vk_error(VK_ERROR_UNKNOWN);
642 }
643
Kristian Høgsbergcb986ef2015-05-12 14:38:12 -0700644 for (uint32_t i = 0; i < cmd_buffer->bo_count; i++)
645 cmd_buffer->exec2_bos[i]->offset = cmd_buffer->exec2_objects[i].offset;
646 } else {
647 *(uint32_t *)queue->completed_serial.map = cmd_buffer->serial;
648 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700649 }
650
651 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700652}
653
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700654VkResult anv_QueueAddMemReferences(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700655 VkQueue queue,
656 uint32_t count,
657 const VkDeviceMemory* pMems)
658{
659 return VK_SUCCESS;
660}
661
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700662VkResult anv_QueueRemoveMemReferences(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700663 VkQueue queue,
664 uint32_t count,
665 const VkDeviceMemory* pMems)
666{
667 return VK_SUCCESS;
668}
669
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700670VkResult anv_QueueWaitIdle(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700671 VkQueue _queue)
672{
673 struct anv_queue *queue = (struct anv_queue *) _queue;
674
675 return vkDeviceWaitIdle((VkDevice) queue->device);
676}
677
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700678VkResult anv_DeviceWaitIdle(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700679 VkDevice _device)
680{
681 struct anv_device *device = (struct anv_device *) _device;
682 struct anv_state state;
683 struct anv_batch batch;
684 struct drm_i915_gem_execbuffer2 execbuf;
685 struct drm_i915_gem_exec_object2 exec2_objects[1];
686 struct anv_bo *bo = NULL;
687 VkResult result;
688 int64_t timeout;
689 int ret;
690
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700691 state = anv_state_pool_alloc(&device->dynamic_state_pool, 32, 32);
692 bo = &device->dynamic_state_pool.block_pool->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700693 batch.next = state.map;
694 anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
695 anv_batch_emit(&batch, GEN8_MI_NOOP);
696
697 exec2_objects[0].handle = bo->gem_handle;
698 exec2_objects[0].relocation_count = 0;
699 exec2_objects[0].relocs_ptr = 0;
700 exec2_objects[0].alignment = 0;
701 exec2_objects[0].offset = bo->offset;
702 exec2_objects[0].flags = 0;
703 exec2_objects[0].rsvd1 = 0;
704 exec2_objects[0].rsvd2 = 0;
705
706 execbuf.buffers_ptr = (uintptr_t) exec2_objects;
707 execbuf.buffer_count = 1;
708 execbuf.batch_start_offset = state.offset;
709 execbuf.batch_len = batch.next - state.map;
710 execbuf.cliprects_ptr = 0;
711 execbuf.num_cliprects = 0;
712 execbuf.DR1 = 0;
713 execbuf.DR4 = 0;
714
715 execbuf.flags =
716 I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
717 execbuf.rsvd1 = device->context_id;
718 execbuf.rsvd2 = 0;
719
720 if (!device->no_hw) {
721 ret = anv_gem_execbuffer(device, &execbuf);
722 if (ret != 0) {
723 result = vk_error(VK_ERROR_UNKNOWN);
724 goto fail;
725 }
726
727 timeout = INT64_MAX;
728 ret = anv_gem_wait(device, bo->gem_handle, &timeout);
729 if (ret != 0) {
730 result = vk_error(VK_ERROR_UNKNOWN);
731 goto fail;
732 }
733 }
734
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700735 anv_state_pool_free(&device->dynamic_state_pool, state);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700736
737 return VK_SUCCESS;
738
739 fail:
Kristian Høgsberg0a775e12015-05-13 15:34:34 -0700740 anv_state_pool_free(&device->dynamic_state_pool, state);
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700741
742 return result;
743}
744
745void *
746anv_device_alloc(struct anv_device * device,
747 size_t size,
748 size_t alignment,
749 VkSystemAllocType allocType)
750{
751 return device->instance->pfnAlloc(device->instance->pAllocUserData,
752 size,
753 alignment,
754 allocType);
755}
756
757void
758anv_device_free(struct anv_device * device,
759 void * mem)
760{
761 return device->instance->pfnFree(device->instance->pAllocUserData,
762 mem);
763}
764
765VkResult
766anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size)
767{
768 bo->gem_handle = anv_gem_create(device, size);
769 if (!bo->gem_handle)
770 return vk_error(VK_ERROR_OUT_OF_DEVICE_MEMORY);
771
772 bo->map = NULL;
773 bo->index = 0;
774 bo->offset = 0;
775 bo->size = size;
776
777 return VK_SUCCESS;
778}
779
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700780VkResult anv_AllocMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700781 VkDevice _device,
782 const VkMemoryAllocInfo* pAllocInfo,
783 VkDeviceMemory* pMem)
784{
785 struct anv_device *device = (struct anv_device *) _device;
786 struct anv_device_memory *mem;
787 VkResult result;
788
789 assert(pAllocInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO);
790
791 mem = anv_device_alloc(device, sizeof(*mem), 8,
792 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
793 if (mem == NULL)
794 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
795
796 result = anv_bo_init_new(&mem->bo, device, pAllocInfo->allocationSize);
797 if (result != VK_SUCCESS)
798 goto fail;
799
800 *pMem = (VkDeviceMemory) mem;
801
802 return VK_SUCCESS;
803
804 fail:
805 anv_device_free(device, mem);
806
807 return result;
808}
809
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700810VkResult anv_FreeMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700811 VkDevice _device,
812 VkDeviceMemory _mem)
813{
814 struct anv_device *device = (struct anv_device *) _device;
815 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
816
817 if (mem->bo.map)
818 anv_gem_munmap(mem->bo.map, mem->bo.size);
819
820 if (mem->bo.gem_handle != 0)
821 anv_gem_close(device, mem->bo.gem_handle);
822
823 anv_device_free(device, mem);
824
825 return VK_SUCCESS;
826}
827
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700828VkResult anv_SetMemoryPriority(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700829 VkDevice device,
830 VkDeviceMemory mem,
831 VkMemoryPriority priority)
832{
833 return VK_SUCCESS;
834}
835
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700836VkResult anv_MapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700837 VkDevice _device,
838 VkDeviceMemory _mem,
839 VkDeviceSize offset,
840 VkDeviceSize size,
841 VkMemoryMapFlags flags,
842 void** ppData)
843{
844 struct anv_device *device = (struct anv_device *) _device;
845 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
846
847 /* FIXME: Is this supposed to be thread safe? Since vkUnmapMemory() only
848 * takes a VkDeviceMemory pointer, it seems like only one map of the memory
849 * at a time is valid. We could just mmap up front and return an offset
850 * pointer here, but that may exhaust virtual memory on 32 bit
851 * userspace. */
852
853 mem->map = anv_gem_mmap(device, mem->bo.gem_handle, offset, size);
854 mem->map_size = size;
855
856 *ppData = mem->map;
857
858 return VK_SUCCESS;
859}
860
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700861VkResult anv_UnmapMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700862 VkDevice _device,
863 VkDeviceMemory _mem)
864{
865 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
866
867 anv_gem_munmap(mem->map, mem->map_size);
868
869 return VK_SUCCESS;
870}
871
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700872VkResult anv_FlushMappedMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700873 VkDevice device,
874 VkDeviceMemory mem,
875 VkDeviceSize offset,
876 VkDeviceSize size)
877{
878 /* clflush here for !llc platforms */
879
880 return VK_SUCCESS;
881}
882
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700883VkResult anv_PinSystemMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700884 VkDevice device,
885 const void* pSysMem,
886 size_t memSize,
887 VkDeviceMemory* pMem)
888{
889 return VK_SUCCESS;
890}
891
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700892VkResult anv_GetMultiDeviceCompatibility(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700893 VkPhysicalDevice physicalDevice0,
894 VkPhysicalDevice physicalDevice1,
895 VkPhysicalDeviceCompatibilityInfo* pInfo)
896{
897 return VK_UNSUPPORTED;
898}
899
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700900VkResult anv_OpenSharedMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700901 VkDevice device,
902 const VkMemoryOpenInfo* pOpenInfo,
903 VkDeviceMemory* pMem)
904{
905 return VK_UNSUPPORTED;
906}
907
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700908VkResult anv_OpenSharedSemaphore(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700909 VkDevice device,
910 const VkSemaphoreOpenInfo* pOpenInfo,
911 VkSemaphore* pSemaphore)
912{
913 return VK_UNSUPPORTED;
914}
915
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700916VkResult anv_OpenPeerMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700917 VkDevice device,
918 const VkPeerMemoryOpenInfo* pOpenInfo,
919 VkDeviceMemory* pMem)
920{
921 return VK_UNSUPPORTED;
922}
923
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700924VkResult anv_OpenPeerImage(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700925 VkDevice device,
926 const VkPeerImageOpenInfo* pOpenInfo,
927 VkImage* pImage,
928 VkDeviceMemory* pMem)
929{
930 return VK_UNSUPPORTED;
931}
932
Kristian Høgsberg454345d2015-05-17 16:33:48 -0700933VkResult anv_DestroyObject(
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700934 VkDevice _device,
935 VkObjectType objType,
Jason Ekstrand57153da2015-05-22 15:15:08 -0700936 VkObject _object)
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700937{
938 struct anv_device *device = (struct anv_device *) _device;
Jason Ekstrand57153da2015-05-22 15:15:08 -0700939 struct anv_object *object = (struct anv_object *) _object;
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700940
Jason Ekstrand57153da2015-05-22 15:15:08 -0700941 switch (objType) {
942 case VK_OBJECT_TYPE_INSTANCE:
943 return anv_DestroyInstance((VkInstance) _object);
944
945 case VK_OBJECT_TYPE_PHYSICAL_DEVICE:
946 /* We don't want to actually destroy physical devices */
947 return VK_SUCCESS;
948
949 case VK_OBJECT_TYPE_DEVICE:
950 assert(_device == (VkDevice) _object);
951 return anv_DestroyDevice((VkDevice) _object);
952
953 case VK_OBJECT_TYPE_QUEUE:
954 /* TODO */
955 return VK_SUCCESS;
956
957 case VK_OBJECT_TYPE_DEVICE_MEMORY:
958 return anv_FreeMemory(_device, (VkDeviceMemory) _object);
959
960 case VK_OBJECT_TYPE_DESCRIPTOR_POOL:
961 /* These are just dummys anyway, so we don't need to destroy them */
962 return VK_SUCCESS;
963
964 case VK_OBJECT_TYPE_BUFFER:
965 case VK_OBJECT_TYPE_BUFFER_VIEW:
966 case VK_OBJECT_TYPE_IMAGE:
967 case VK_OBJECT_TYPE_IMAGE_VIEW:
968 case VK_OBJECT_TYPE_COLOR_ATTACHMENT_VIEW:
969 case VK_OBJECT_TYPE_DEPTH_STENCIL_VIEW:
970 case VK_OBJECT_TYPE_SHADER:
971 case VK_OBJECT_TYPE_PIPELINE_LAYOUT:
972 case VK_OBJECT_TYPE_SAMPLER:
973 case VK_OBJECT_TYPE_DESCRIPTOR_SET:
974 case VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT:
975 case VK_OBJECT_TYPE_DYNAMIC_RS_STATE:
976 case VK_OBJECT_TYPE_DYNAMIC_CB_STATE:
977 case VK_OBJECT_TYPE_DYNAMIC_DS_STATE:
978 case VK_OBJECT_TYPE_RENDER_PASS:
979 /* These are trivially destroyable */
980 anv_device_free(device, (void *) _object);
981 return VK_SUCCESS;
982
983 case VK_OBJECT_TYPE_COMMAND_BUFFER:
984 case VK_OBJECT_TYPE_PIPELINE:
985 case VK_OBJECT_TYPE_DYNAMIC_VP_STATE:
986 case VK_OBJECT_TYPE_FENCE:
987 case VK_OBJECT_TYPE_QUERY_POOL:
988 case VK_OBJECT_TYPE_FRAMEBUFFER:
989 (object->destructor)(device, object, objType);
990 return VK_SUCCESS;
991
992 case VK_OBJECT_TYPE_SEMAPHORE:
993 case VK_OBJECT_TYPE_EVENT:
994 stub_return(VK_UNSUPPORTED);
995
996 default:
997 unreachable("Invalid object type");
998 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -0700999}
1000
1001static void
1002fill_memory_requirements(
1003 VkObjectType objType,
1004 VkObject object,
1005 VkMemoryRequirements * memory_requirements)
1006{
1007 struct anv_buffer *buffer;
1008 struct anv_image *image;
1009
1010 memory_requirements->memPropsAllowed =
1011 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1012 VK_MEMORY_PROPERTY_HOST_DEVICE_COHERENT_BIT |
1013 /* VK_MEMORY_PROPERTY_HOST_UNCACHED_BIT | */
1014 VK_MEMORY_PROPERTY_HOST_WRITE_COMBINED_BIT |
1015 VK_MEMORY_PROPERTY_PREFER_HOST_LOCAL |
1016 VK_MEMORY_PROPERTY_SHAREABLE_BIT;
1017
1018 memory_requirements->memPropsRequired = 0;
1019
1020 switch (objType) {
1021 case VK_OBJECT_TYPE_BUFFER:
1022 buffer = (struct anv_buffer *) object;
1023 memory_requirements->size = buffer->size;
1024 memory_requirements->alignment = 16;
1025 break;
1026 case VK_OBJECT_TYPE_IMAGE:
1027 image = (struct anv_image *) object;
1028 memory_requirements->size = image->size;
1029 memory_requirements->alignment = image->alignment;
1030 break;
1031 default:
1032 memory_requirements->size = 0;
1033 break;
1034 }
1035}
1036
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001037static uint32_t
1038get_allocation_count(VkObjectType objType)
1039{
1040 switch (objType) {
1041 case VK_OBJECT_TYPE_BUFFER:
1042 case VK_OBJECT_TYPE_IMAGE:
1043 return 1;
1044 default:
1045 return 0;
1046 }
1047}
1048
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001049VkResult anv_GetObjectInfo(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001050 VkDevice _device,
1051 VkObjectType objType,
1052 VkObject object,
1053 VkObjectInfoType infoType,
1054 size_t* pDataSize,
1055 void* pData)
1056{
1057 VkMemoryRequirements memory_requirements;
Kristian Høgsberg05754542015-05-18 08:50:04 -07001058 uint32_t *count;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001059
1060 switch (infoType) {
1061 case VK_OBJECT_INFO_TYPE_MEMORY_REQUIREMENTS:
Kristian Høgsberg783e6212015-05-17 19:22:52 -07001062 *pDataSize = sizeof(memory_requirements);
1063 if (pData == NULL)
1064 return VK_SUCCESS;
1065
Kristian Høgsberg05754542015-05-18 08:50:04 -07001066 fill_memory_requirements(objType, object, pData);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001067 return VK_SUCCESS;
1068
1069 case VK_OBJECT_INFO_TYPE_MEMORY_ALLOCATION_COUNT:
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001070 *pDataSize = sizeof(count);
1071 if (pData == NULL)
1072 return VK_SUCCESS;
1073
Kristian Høgsberg05754542015-05-18 08:50:04 -07001074 count = pData;
1075 *count = get_allocation_count(objType);
Kristian Høgsbergb7fac7a2015-05-17 19:25:28 -07001076 return VK_SUCCESS;
1077
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001078 default:
1079 return VK_UNSUPPORTED;
1080 }
1081
1082}
1083
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001084VkResult anv_QueueBindObjectMemory(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001085 VkQueue queue,
1086 VkObjectType objType,
1087 VkObject object,
1088 uint32_t allocationIdx,
1089 VkDeviceMemory _mem,
1090 VkDeviceSize memOffset)
1091{
1092 struct anv_buffer *buffer;
1093 struct anv_image *image;
1094 struct anv_device_memory *mem = (struct anv_device_memory *) _mem;
1095
1096 switch (objType) {
1097 case VK_OBJECT_TYPE_BUFFER:
1098 buffer = (struct anv_buffer *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001099 buffer->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001100 buffer->offset = memOffset;
1101 break;
1102 case VK_OBJECT_TYPE_IMAGE:
1103 image = (struct anv_image *) object;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001104 image->bo = &mem->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001105 image->offset = memOffset;
1106 break;
1107 default:
1108 break;
1109 }
1110
1111 return VK_SUCCESS;
1112}
1113
/* vkQueueBindObjectMemoryRange: sparse-style range binding is not
 * implemented; stubbed out. */
VkResult anv_QueueBindObjectMemoryRange(
    VkQueue                                     queue,
    VkObjectType                                objType,
    VkObject                                    object,
    uint32_t                                    allocationIdx,
    VkDeviceSize                                rangeOffset,
    VkDeviceSize                                rangeSize,
    VkDeviceMemory                              mem,
    VkDeviceSize                                memOffset)
{
   stub_return(VK_UNSUPPORTED);
}
1126
/* vkQueueBindImageMemoryRange: sparse image binding is not implemented;
 * stubbed out. */
VkResult anv_QueueBindImageMemoryRange(
    VkQueue                                     queue,
    VkImage                                     image,
    uint32_t                                    allocationIdx,
    const VkImageMemoryBindInfo*                pBindInfo,
    VkDeviceMemory                              mem,
    VkDeviceSize                                memOffset)
{
   stub_return(VK_UNSUPPORTED);
}
1137
Jason Ekstrand57153da2015-05-22 15:15:08 -07001138static void
1139anv_fence_destroy(struct anv_device *device,
1140 struct anv_object *object,
1141 VkObjectType obj_type)
1142{
1143 struct anv_fence *fence = (struct anv_fence *) object;
1144
1145 assert(obj_type == VK_OBJECT_TYPE_FENCE);
1146
1147 anv_gem_munmap(fence->bo.map, fence->bo.size);
1148 anv_gem_close(device, fence->bo.gem_handle);
1149 anv_device_free(device, fence);
1150}
1151
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001152VkResult anv_CreateFence(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001153 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001154 const VkFenceCreateInfo* pCreateInfo,
1155 VkFence* pFence)
1156{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001157 struct anv_device *device = (struct anv_device *) _device;
1158 struct anv_fence *fence;
1159 struct anv_batch batch;
1160 VkResult result;
1161
1162 const uint32_t fence_size = 128;
1163
1164 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FENCE_CREATE_INFO);
1165
1166 fence = anv_device_alloc(device, sizeof(*fence), 8,
1167 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1168 if (fence == NULL)
1169 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1170
1171 result = anv_bo_init_new(&fence->bo, device, fence_size);
1172 if (result != VK_SUCCESS)
1173 goto fail;
1174
Jason Ekstrand57153da2015-05-22 15:15:08 -07001175 fence->base.destructor = anv_fence_destroy;
1176
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001177 fence->bo.map =
1178 anv_gem_mmap(device, fence->bo.gem_handle, 0, fence->bo.size);
1179 batch.next = fence->bo.map;
1180 anv_batch_emit(&batch, GEN8_MI_BATCH_BUFFER_END);
1181 anv_batch_emit(&batch, GEN8_MI_NOOP);
1182
1183 fence->exec2_objects[0].handle = fence->bo.gem_handle;
1184 fence->exec2_objects[0].relocation_count = 0;
1185 fence->exec2_objects[0].relocs_ptr = 0;
1186 fence->exec2_objects[0].alignment = 0;
1187 fence->exec2_objects[0].offset = fence->bo.offset;
1188 fence->exec2_objects[0].flags = 0;
1189 fence->exec2_objects[0].rsvd1 = 0;
1190 fence->exec2_objects[0].rsvd2 = 0;
1191
1192 fence->execbuf.buffers_ptr = (uintptr_t) fence->exec2_objects;
1193 fence->execbuf.buffer_count = 1;
1194 fence->execbuf.batch_start_offset = 0;
1195 fence->execbuf.batch_len = batch.next - fence->bo.map;
1196 fence->execbuf.cliprects_ptr = 0;
1197 fence->execbuf.num_cliprects = 0;
1198 fence->execbuf.DR1 = 0;
1199 fence->execbuf.DR4 = 0;
1200
1201 fence->execbuf.flags =
1202 I915_EXEC_HANDLE_LUT | I915_EXEC_NO_RELOC | I915_EXEC_RENDER;
1203 fence->execbuf.rsvd1 = device->context_id;
1204 fence->execbuf.rsvd2 = 0;
1205
1206 *pFence = (VkQueryPool) fence;
1207
1208 return VK_SUCCESS;
1209
1210 fail:
1211 anv_device_free(device, fence);
1212
1213 return result;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001214}
1215
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001216VkResult anv_ResetFences(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001217 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001218 uint32_t fenceCount,
1219 VkFence* pFences)
1220{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001221 struct anv_fence **fences = (struct anv_fence **) pFences;
1222
1223 for (uint32_t i; i < fenceCount; i++)
1224 fences[i]->ready = false;
1225
1226 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001227}
1228
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001229VkResult anv_GetFenceStatus(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001230 VkDevice _device,
1231 VkFence _fence)
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001232{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001233 struct anv_device *device = (struct anv_device *) _device;
1234 struct anv_fence *fence = (struct anv_fence *) _fence;
1235 int64_t t = 0;
1236 int ret;
1237
1238 if (fence->ready)
1239 return VK_SUCCESS;
1240
1241 ret = anv_gem_wait(device, fence->bo.gem_handle, &t);
1242 if (ret == 0) {
1243 fence->ready = true;
1244 return VK_SUCCESS;
1245 }
1246
1247 return VK_NOT_READY;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001248}
1249
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001250VkResult anv_WaitForFences(
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001251 VkDevice _device,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001252 uint32_t fenceCount,
1253 const VkFence* pFences,
1254 bool32_t waitAll,
1255 uint64_t timeout)
1256{
Kristian Høgsberg6afb2642015-05-18 08:49:15 -07001257 struct anv_device *device = (struct anv_device *) _device;
1258 struct anv_fence **fences = (struct anv_fence **) pFences;
1259 int64_t t = timeout;
1260 int ret;
1261
1262 /* FIXME: handle !waitAll */
1263
1264 for (uint32_t i = 0; i < fenceCount; i++) {
1265 ret = anv_gem_wait(device, fences[i]->bo.gem_handle, &t);
1266 if (ret == -1 && errno == ETIME)
1267 return VK_TIMEOUT;
1268 else if (ret == -1)
1269 return vk_error(VK_ERROR_UNKNOWN);
1270 }
1271
1272 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001273}
1274
1275// Queue semaphore functions
1276
/* vkCreateSemaphore: queue semaphores are not implemented; stubbed out. */
VkResult anv_CreateSemaphore(
    VkDevice                                    device,
    const VkSemaphoreCreateInfo*                pCreateInfo,
    VkSemaphore*                                pSemaphore)
{
   stub_return(VK_UNSUPPORTED);
}
1284
/* vkQueueSignalSemaphore: not implemented; stubbed out. */
VkResult anv_QueueSignalSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   stub_return(VK_UNSUPPORTED);
}
1291
/* vkQueueWaitSemaphore: not implemented; stubbed out. */
VkResult anv_QueueWaitSemaphore(
    VkQueue                                     queue,
    VkSemaphore                                 semaphore)
{
   stub_return(VK_UNSUPPORTED);
}
1298
1299// Event functions
1300
/* vkCreateEvent: events are not implemented; stubbed out. */
VkResult anv_CreateEvent(
    VkDevice                                    device,
    const VkEventCreateInfo*                    pCreateInfo,
    VkEvent*                                    pEvent)
{
   stub_return(VK_UNSUPPORTED);
}
1308
/* vkGetEventStatus: events are not implemented; stubbed out. */
VkResult anv_GetEventStatus(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}
1315
/* vkSetEvent: events are not implemented; stubbed out. */
VkResult anv_SetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}
1322
/* vkResetEvent: events are not implemented; stubbed out. */
VkResult anv_ResetEvent(
    VkDevice                                    device,
    VkEvent                                     event)
{
   stub_return(VK_UNSUPPORTED);
}
1329
1330// Query functions
1331
Jason Ekstrand57153da2015-05-22 15:15:08 -07001332static void
1333anv_query_pool_destroy(struct anv_device *device,
1334 struct anv_object *object,
1335 VkObjectType obj_type)
1336{
1337 struct anv_query_pool *pool = (struct anv_query_pool *) object;
1338
1339 assert(obj_type == VK_OBJECT_TYPE_QUERY_POOL);
1340
1341 anv_gem_munmap(pool->bo.map, pool->bo.size);
1342 anv_gem_close(device, pool->bo.gem_handle);
1343 anv_device_free(device, pool);
1344}
1345
/* vkCreateQueryPool: builds an occlusion query pool backed by a GEM BO
 * kept persistently mapped; each slot is a struct anv_query_pool_slot
 * (begin/end counter pair) that the command stream writes into.
 * Pipeline-statistics queries are not supported yet. */
VkResult anv_CreateQueryPool(
    VkDevice                                    _device,
    const VkQueryPoolCreateInfo*                pCreateInfo,
    VkQueryPool*                                pQueryPool)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_query_pool *pool;
   VkResult result;
   size_t size;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO);

   /* Validate the query type before allocating anything. */
   switch (pCreateInfo->queryType) {
   case VK_QUERY_TYPE_OCCLUSION:
      break;
   case VK_QUERY_TYPE_PIPELINE_STATISTICS:
      return VK_UNSUPPORTED;
   default:
      unreachable("");
   }

   pool = anv_device_alloc(device, sizeof(*pool), 8,
                           VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (pool == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* Lets anv_DestroyObject tear the pool down properly. */
   pool->base.destructor = anv_query_pool_destroy;

   pool->type = pCreateInfo->queryType;
   size = pCreateInfo->slots * sizeof(struct anv_query_pool_slot);
   result = anv_bo_init_new(&pool->bo, device, size);
   if (result != VK_SUCCESS)
      goto fail;

   /* NOTE(review): mmap result is not checked — confirm anv_gem_mmap
    * failure semantics. */
   pool->bo.map = anv_gem_mmap(device, pool->bo.gem_handle, 0, size);

   *pQueryPool = (VkQueryPool) pool;

   return VK_SUCCESS;

 fail:
   anv_device_free(device, pool);

   return result;
}
1391
/* vkGetQueryPoolResults: copy occlusion results into the caller's
 * buffer.  Each result is end - begin from the slot written by the GPU;
 * 32-bit output is clamped at UINT32_MAX.  With pData == NULL only the
 * required *pDataSize is reported.  VK_QUERY_RESULT_WAIT_BIT blocks on
 * the pool's BO; availability reporting is not implemented. */
VkResult anv_GetQueryPoolResults(
    VkDevice                                    _device,
    VkQueryPool                                 queryPool,
    uint32_t                                    startQuery,
    uint32_t                                    queryCount,
    size_t*                                     pDataSize,
    void*                                       pData,
    VkQueryResultFlags                          flags)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_query_pool *pool = (struct anv_query_pool *) queryPool;
   struct anv_query_pool_slot *slot = pool->bo.map;
   int64_t timeout = INT64_MAX;
   uint32_t *dst32 = pData;
   uint64_t *dst64 = pData;
   uint64_t result;
   int ret;

   if (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT) {
      /* Where is the availabilty info supposed to go? */
      anv_finishme("VK_QUERY_RESULT_WITH_AVAILABILITY_BIT");
      return VK_UNSUPPORTED;
   }

   assert(pool->type == VK_QUERY_TYPE_OCCLUSION);

   /* Result stride depends on the requested output width. */
   if (flags & VK_QUERY_RESULT_64_BIT)
      *pDataSize = queryCount * sizeof(uint64_t);
   else
      *pDataSize = queryCount * sizeof(uint32_t);

   if (pData == NULL)
      return VK_SUCCESS;

   if (flags & VK_QUERY_RESULT_WAIT_BIT) {
      /* Block until the GPU has finished writing the query BO. */
      ret = anv_gem_wait(device, pool->bo.gem_handle, &timeout);
      if (ret == -1)
         return vk_error(VK_ERROR_UNKNOWN);
   }

   for (uint32_t i = 0; i < queryCount; i++) {
      result = slot[startQuery + i].end - slot[startQuery + i].begin;
      if (flags & VK_QUERY_RESULT_64_BIT) {
         *dst64++ = result;
      } else {
         /* Saturate rather than silently truncating to 32 bits. */
         if (result > UINT32_MAX)
            result = UINT32_MAX;
         *dst32++ = result;
      }
   }

   return VK_SUCCESS;
}
1445
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001446// Buffer functions
1447
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001448VkResult anv_CreateBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001449 VkDevice _device,
1450 const VkBufferCreateInfo* pCreateInfo,
1451 VkBuffer* pBuffer)
1452{
1453 struct anv_device *device = (struct anv_device *) _device;
1454 struct anv_buffer *buffer;
1455
1456 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
1457
1458 buffer = anv_device_alloc(device, sizeof(*buffer), 8,
1459 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1460 if (buffer == NULL)
1461 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1462
1463 buffer->size = pCreateInfo->size;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07001464 buffer->bo = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001465 buffer->offset = 0;
1466
1467 *pBuffer = (VkBuffer) buffer;
1468
1469 return VK_SUCCESS;
1470}
1471
1472// Buffer view functions
1473
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001474VkResult anv_CreateBufferView(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001475 VkDevice _device,
1476 const VkBufferViewCreateInfo* pCreateInfo,
1477 VkBufferView* pView)
1478{
1479 struct anv_device *device = (struct anv_device *) _device;
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001480 struct anv_buffer *buffer = (struct anv_buffer *) pCreateInfo->buffer;
1481 struct anv_surface_view *view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001482 const struct anv_format *format;
1483
1484 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO);
1485
1486 view = anv_device_alloc(device, sizeof(*view), 8,
1487 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1488 if (view == NULL)
1489 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1490
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001491 view->bo = buffer->bo;
1492 view->offset = buffer->offset + pCreateInfo->offset;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001493 view->surface_state =
1494 anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001495 view->format = pCreateInfo->format;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001496
1497 format = anv_format_for_vk_format(pCreateInfo->format);
1498 /* This assumes RGBA float format. */
1499 uint32_t stride = 4;
1500 uint32_t num_elements = pCreateInfo->range / stride;
1501 struct GEN8_RENDER_SURFACE_STATE surface_state = {
1502 .SurfaceType = SURFTYPE_BUFFER,
1503 .SurfaceArray = false,
1504 .SurfaceFormat = format->format,
1505 .SurfaceVerticalAlignment = VALIGN4,
1506 .SurfaceHorizontalAlignment = HALIGN4,
1507 .TileMode = LINEAR,
1508 .VerticalLineStride = 0,
1509 .VerticalLineStrideOffset = 0,
1510 .SamplerL2BypassModeDisable = true,
1511 .RenderCacheReadWriteMode = WriteOnlyCache,
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07001512 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001513 .BaseMipLevel = 0,
1514 .SurfaceQPitch = 0,
1515 .Height = (num_elements >> 7) & 0x3fff,
1516 .Width = num_elements & 0x7f,
1517 .Depth = (num_elements >> 21) & 0x3f,
1518 .SurfacePitch = stride - 1,
1519 .MinimumArrayElement = 0,
1520 .NumberofMultisamples = MULTISAMPLECOUNT_1,
1521 .XOffset = 0,
1522 .YOffset = 0,
1523 .SurfaceMinLOD = 0,
1524 .MIPCountLOD = 0,
1525 .AuxiliarySurfaceMode = AUX_NONE,
1526 .RedClearColor = 0,
1527 .GreenClearColor = 0,
1528 .BlueClearColor = 0,
1529 .AlphaClearColor = 0,
1530 .ShaderChannelSelectRed = SCS_RED,
1531 .ShaderChannelSelectGreen = SCS_GREEN,
1532 .ShaderChannelSelectBlue = SCS_BLUE,
1533 .ShaderChannelSelectAlpha = SCS_ALPHA,
1534 .ResourceMinLOD = 0,
1535 /* FIXME: We assume that the image must be bound at this time. */
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001536 .SurfaceBaseAddress = { NULL, view->offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001537 };
1538
1539 GEN8_RENDER_SURFACE_STATE_pack(NULL, view->surface_state.map, &surface_state);
1540
1541 *pView = (VkImageView) view;
1542
1543 return VK_SUCCESS;
1544}
1545
1546// Sampler functions
1547
/* vkCreateSampler: translate the Vulkan sampler description into a
 * packed GEN8 SAMPLER_STATE stored in sampler->state.  The vk_to_gen_*
 * tables map Vulkan enums to the corresponding hardware encodings.
 * Anisotropic filtering is not wired up yet (see anv_finishme). */
VkResult anv_CreateSampler(
    VkDevice                                    _device,
    const VkSamplerCreateInfo*                  pCreateInfo,
    VkSampler*                                  pSampler)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_sampler *sampler;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);

   sampler = anv_device_alloc(device, sizeof(*sampler), 8,
                              VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!sampler)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   static const uint32_t vk_to_gen_tex_filter[] = {
      [VK_TEX_FILTER_NEAREST] = MAPFILTER_NEAREST,
      [VK_TEX_FILTER_LINEAR] = MAPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_mipmap_mode[] = {
      [VK_TEX_MIPMAP_MODE_BASE] = MIPFILTER_NONE,
      [VK_TEX_MIPMAP_MODE_NEAREST] = MIPFILTER_NEAREST,
      [VK_TEX_MIPMAP_MODE_LINEAR] = MIPFILTER_LINEAR
   };

   static const uint32_t vk_to_gen_tex_address[] = {
      [VK_TEX_ADDRESS_WRAP] = TCM_WRAP,
      [VK_TEX_ADDRESS_MIRROR] = TCM_MIRROR,
      [VK_TEX_ADDRESS_CLAMP] = TCM_CLAMP,
      [VK_TEX_ADDRESS_MIRROR_ONCE] = TCM_MIRROR_ONCE,
      [VK_TEX_ADDRESS_CLAMP_BORDER] = TCM_CLAMP_BORDER,
   };

   static const uint32_t vk_to_gen_compare_op[] = {
      [VK_COMPARE_OP_NEVER] = PREFILTEROPNEVER,
      [VK_COMPARE_OP_LESS] = PREFILTEROPLESS,
      [VK_COMPARE_OP_EQUAL] = PREFILTEROPEQUAL,
      [VK_COMPARE_OP_LESS_EQUAL] = PREFILTEROPLEQUAL,
      [VK_COMPARE_OP_GREATER] = PREFILTEROPGREATER,
      [VK_COMPARE_OP_NOT_EQUAL] = PREFILTEROPNOTEQUAL,
      [VK_COMPARE_OP_GREATER_EQUAL] = PREFILTEROPGEQUAL,
      [VK_COMPARE_OP_ALWAYS] = PREFILTEROPALWAYS,
   };

   if (pCreateInfo->maxAnisotropy > 0)
      anv_finishme("missing support for anisotropic filtering");

   struct GEN8_SAMPLER_STATE sampler_state = {
      .SamplerDisable = false,
      .TextureBorderColorMode = DX10OGL,
      .LODPreClampMode = 0,
      .BaseMipLevel = 0,
      .MipModeFilter = vk_to_gen_mipmap_mode[pCreateInfo->mipMode],
      .MagModeFilter = vk_to_gen_tex_filter[pCreateInfo->magFilter],
      .MinModeFilter = vk_to_gen_tex_filter[pCreateInfo->minFilter],
      /* NOTE(review): the * 256 factors presumably convert float LOD
       * values to the hardware's fixed-point encoding — confirm the
       * exact format against the GEN8 PRM. */
      .TextureLODBias = pCreateInfo->mipLodBias * 256,
      .AnisotropicAlgorithm = EWAApproximation,
      .MinLOD = pCreateInfo->minLod * 256,
      .MaxLOD = pCreateInfo->maxLod * 256,
      .ChromaKeyEnable = 0,
      .ChromaKeyIndex = 0,
      .ChromaKeyMode = 0,
      .ShadowFunction = vk_to_gen_compare_op[pCreateInfo->compareOp],
      .CubeSurfaceControlMode = 0,
      .IndirectStatePointer = 0,
      .LODClampMagnificationMode = MIPNONE,
      .MaximumAnisotropy = 0,
      .RAddressMinFilterRoundingEnable = 0,
      .RAddressMagFilterRoundingEnable = 0,
      .VAddressMinFilterRoundingEnable = 0,
      .VAddressMagFilterRoundingEnable = 0,
      .UAddressMinFilterRoundingEnable = 0,
      .UAddressMagFilterRoundingEnable = 0,
      .TrilinearFilterQuality = 0,
      .NonnormalizedCoordinateEnable = 0,
      .TCXAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressU],
      .TCYAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressV],
      .TCZAddressControlMode = vk_to_gen_tex_address[pCreateInfo->addressW],
   };

   GEN8_SAMPLER_STATE_pack(NULL, sampler->state, &sampler_state);

   *pSampler = (VkSampler) sampler;

   return VK_SUCCESS;
}
1635
1636// Descriptor set functions
1637
/* vkCreateDescriptorSetLayout: builds the per-stage binding-table
 * layout for a descriptor set.  Works in three passes:
 *   1. count, per shader stage, how many sampler and surface entries
 *      each binding contributes (combined image/samplers contribute to
 *      both), plus the number of dynamic buffers;
 *   2. allocate one block holding the layout struct and a flat uint32_t
 *      array carved into per-stage surface/sampler sub-arrays;
 *   3. fill the sub-arrays with the descriptor indices each stage uses.
 */
VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayout*                      pSetLayout)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t sampler_count[VK_NUM_SHADER_STAGE] = { 0, };
   uint32_t surface_count[VK_NUM_SHADER_STAGE] = { 0, };
   uint32_t num_dynamic_buffers = 0;
   uint32_t count = 0;
   uint32_t s;

   /* Pass 1a: per-stage sampler/surface entry counts. */
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            sampler_count[s] += pCreateInfo->pBinding[i].count;
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            sampler_count[s] += pCreateInfo->pBinding[i].count;

         /* fall through */

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            surface_count[s] += pCreateInfo->pBinding[i].count;
         break;
      default:
         break;
      }

      count += pCreateInfo->pBinding[i].count;
   }

   /* Pass 1b: count dynamic buffer bindings (one per binding, not per
    * array element). */
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         num_dynamic_buffers++;
         break;
      default:
         break;
      }
   }

   uint32_t sampler_total = 0;
   uint32_t surface_total = 0;
   for (uint32_t s = 0; s < VK_NUM_SHADER_STAGE; s++) {
      sampler_total += sampler_count[s];
      surface_total += surface_count[s];
   }

   /* Pass 2: one allocation for the struct plus the flat entry array. */
   size_t size = sizeof(*set_layout) +
      (sampler_total + surface_total) * sizeof(uint32_t);
   set_layout = anv_device_alloc(device, size, 8,
                                 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (!set_layout)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->num_dynamic_buffers = num_dynamic_buffers;
   set_layout->count = count;

   /* Carve the trailing array into per-stage surface then sampler
    * sub-arrays; sampler[]/surface[] track each stage's write cursor. */
   uint32_t *p = set_layout->entries;
   uint32_t *sampler[VK_NUM_SHADER_STAGE];
   uint32_t *surface[VK_NUM_SHADER_STAGE];
   for (uint32_t s = 0; s < VK_NUM_SHADER_STAGE; s++) {
      set_layout->stage[s].surface_count = surface_count[s];
      set_layout->stage[s].surface_start = surface[s] = p;
      p += surface_count[s];
      set_layout->stage[s].sampler_count = sampler_count[s];
      set_layout->stage[s].sampler_start = sampler[s] = p;
      p += sampler_count[s];
   }

   /* Pass 3: record which descriptor index each entry refers to. */
   uint32_t descriptor = 0;
   for (uint32_t i = 0; i < pCreateInfo->count; i++) {
      switch (pCreateInfo->pBinding[i].descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].count; j++)
               *(sampler[s])++ = descriptor + j;
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].count; j++)
               *(sampler[s])++ = descriptor + j;

         /* fallthrough */

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for_each_bit(s, pCreateInfo->pBinding[i].stageFlags)
            for (uint32_t j = 0; j < pCreateInfo->pBinding[i].count; j++) {
               *(surface[s])++ = descriptor + j;
            }
         break;
      default:
         unreachable("");
      }
      descriptor += pCreateInfo->pBinding[i].count;
   }

   *pSetLayout = (VkDescriptorSetLayout) set_layout;

   return VK_SUCCESS;
}
1764
/* vkBeginDescriptorPoolUpdate: descriptor pools are dummies in this
 * driver (see anv_CreateDescriptorPool), so this is a no-op. */
VkResult anv_BeginDescriptorPoolUpdate(
    VkDevice                                    device,
    VkDescriptorUpdateMode                      updateMode)
{
   return VK_SUCCESS;
}
1771
/* Stub implementation: since anv_BeginDescriptorPoolUpdate does nothing,
 * there is nothing to flush when the update pass ends.  The command buffer
 * argument is unused.
 */
VkResult anv_EndDescriptorPoolUpdate(
    VkDevice device,
    VkCmdBuffer cmd)
{
   return VK_SUCCESS;
}
1778
/* Stub implementation: descriptor pools are not real objects here --
 * descriptor sets are allocated straight from the device allocator in
 * anv_AllocDescriptorSets -- so we hand back a dummy non-zero handle and
 * ignore the usage, set count and create info.
 */
VkResult anv_CreateDescriptorPool(
    VkDevice device,
    VkDescriptorPoolUsage poolUsage,
    uint32_t maxSets,
    const VkDescriptorPoolCreateInfo* pCreateInfo,
    VkDescriptorPool* pDescriptorPool)
{
   *pDescriptorPool = 1;

   return VK_SUCCESS;
}
1790
/* Stub implementation: the pool is a dummy handle (see
 * anv_CreateDescriptorPool) and owns no storage, so a reset is a no-op.
 */
VkResult anv_ResetDescriptorPool(
    VkDevice device,
    VkDescriptorPool descriptorPool)
{
   return VK_SUCCESS;
}
1797
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001798VkResult anv_AllocDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001799 VkDevice _device,
1800 VkDescriptorPool descriptorPool,
1801 VkDescriptorSetUsage setUsage,
1802 uint32_t count,
1803 const VkDescriptorSetLayout* pSetLayouts,
1804 VkDescriptorSet* pDescriptorSets,
1805 uint32_t* pCount)
1806{
1807 struct anv_device *device = (struct anv_device *) _device;
1808 const struct anv_descriptor_set_layout *layout;
1809 struct anv_descriptor_set *set;
1810 size_t size;
1811
1812 for (uint32_t i = 0; i < count; i++) {
1813 layout = (struct anv_descriptor_set_layout *) pSetLayouts[i];
Kristian Høgsberga77229c2015-05-13 11:49:30 -07001814 size = sizeof(*set) + layout->count * sizeof(set->descriptors[0]);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001815 set = anv_device_alloc(device, size, 8,
1816 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1817 if (!set) {
1818 *pCount = i;
1819 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1820 }
1821
Jason Ekstrand0a547512015-05-21 16:33:04 -07001822 /* Descriptor sets may not be 100% filled out so we need to memset to
1823 * ensure that we can properly detect and handle holes.
1824 */
1825 memset(set, 0, size);
1826
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001827 pDescriptorSets[i] = (VkDescriptorSet) set;
1828 }
1829
1830 *pCount = count;
1831
Kristian Høgsbergb4b3bd12015-05-17 18:39:12 -07001832 return VK_SUCCESS;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001833}
1834
/* Stub implementation: intentionally a no-op.  Descriptor set storage is
 * not reclaimed here; the pool and set arguments are ignored.
 */
void anv_ClearDescriptorSets(
    VkDevice device,
    VkDescriptorPool descriptorPool,
    uint32_t count,
    const VkDescriptorSet* pDescriptorSets)
{
}
1842
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001843void anv_UpdateDescriptors(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001844 VkDevice _device,
1845 VkDescriptorSet descriptorSet,
1846 uint32_t updateCount,
1847 const void** ppUpdateArray)
1848{
1849 struct anv_descriptor_set *set = (struct anv_descriptor_set *) descriptorSet;
1850 VkUpdateSamplers *update_samplers;
1851 VkUpdateSamplerTextures *update_sampler_textures;
1852 VkUpdateImages *update_images;
1853 VkUpdateBuffers *update_buffers;
1854 VkUpdateAsCopy *update_as_copy;
1855
1856 for (uint32_t i = 0; i < updateCount; i++) {
1857 const struct anv_common *common = ppUpdateArray[i];
1858
1859 switch (common->sType) {
1860 case VK_STRUCTURE_TYPE_UPDATE_SAMPLERS:
1861 update_samplers = (VkUpdateSamplers *) common;
1862
1863 for (uint32_t j = 0; j < update_samplers->count; j++) {
Kristian Høgsberg4f9eaf72015-05-13 14:02:35 -07001864 set->descriptors[update_samplers->binding + j].sampler =
1865 (struct anv_sampler *) update_samplers->pSamplers[j];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001866 }
1867 break;
1868
1869 case VK_STRUCTURE_TYPE_UPDATE_SAMPLER_TEXTURES:
1870 /* FIXME: Shouldn't this be *_UPDATE_SAMPLER_IMAGES? */
1871 update_sampler_textures = (VkUpdateSamplerTextures *) common;
1872
1873 for (uint32_t j = 0; j < update_sampler_textures->count; j++) {
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001874 set->descriptors[update_sampler_textures->binding + j].view =
1875 (struct anv_surface_view *)
Kristian Høgsberg4f9eaf72015-05-13 14:02:35 -07001876 update_sampler_textures->pSamplerImageViews[j].pImageView->view;
1877 set->descriptors[update_sampler_textures->binding + j].sampler =
1878 (struct anv_sampler *)
1879 update_sampler_textures->pSamplerImageViews[j].sampler;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001880 }
1881 break;
1882
1883 case VK_STRUCTURE_TYPE_UPDATE_IMAGES:
1884 update_images = (VkUpdateImages *) common;
1885
1886 for (uint32_t j = 0; j < update_images->count; j++) {
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001887 set->descriptors[update_images->binding + j].view =
1888 (struct anv_surface_view *) update_images->pImageViews[j].view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001889 }
1890 break;
1891
1892 case VK_STRUCTURE_TYPE_UPDATE_BUFFERS:
1893 update_buffers = (VkUpdateBuffers *) common;
1894
1895 for (uint32_t j = 0; j < update_buffers->count; j++) {
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07001896 set->descriptors[update_buffers->binding + j].view =
1897 (struct anv_surface_view *) update_buffers->pBufferViews[j].view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001898 }
1899 /* FIXME: descriptor arrays? */
1900 break;
1901
1902 case VK_STRUCTURE_TYPE_UPDATE_AS_COPY:
1903 update_as_copy = (VkUpdateAsCopy *) common;
1904 (void) update_as_copy;
1905 break;
1906
1907 default:
1908 break;
1909 }
1910 }
1911}
1912
1913// State object functions
1914
/* Clamp x into [min, max].  The min bound is checked first, so for a
 * degenerate range (min > max) the result is min -- matching the
 * original cascaded-comparison behavior.
 */
static inline int64_t
clamp_int64(int64_t x, int64_t min, int64_t max)
{
   if (x < min)
      return min;
   return x < max ? x : max;
}
1925
Jason Ekstrand57153da2015-05-22 15:15:08 -07001926static void
1927anv_dynamic_vp_state_destroy(struct anv_device *device,
1928 struct anv_object *object,
1929 VkObjectType obj_type)
1930{
1931 struct anv_dynamic_vp_state *state = (void *)object;
1932
1933 assert(obj_type == VK_OBJECT_TYPE_DYNAMIC_VP_STATE);
1934
1935 anv_state_pool_free(&device->dynamic_state_pool, state->sf_clip_vp);
1936 anv_state_pool_free(&device->dynamic_state_pool, state->cc_vp);
1937 anv_state_pool_free(&device->dynamic_state_pool, state->scissor);
1938
1939 anv_device_free(device, state);
1940}
1941
Kristian Høgsberg454345d2015-05-17 16:33:48 -07001942VkResult anv_CreateDynamicViewportState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001943 VkDevice _device,
1944 const VkDynamicVpStateCreateInfo* pCreateInfo,
1945 VkDynamicVpState* pState)
1946{
1947 struct anv_device *device = (struct anv_device *) _device;
1948 struct anv_dynamic_vp_state *state;
1949
1950 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO);
1951
1952 state = anv_device_alloc(device, sizeof(*state), 8,
1953 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
1954 if (state == NULL)
1955 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1956
Jason Ekstrand57153da2015-05-22 15:15:08 -07001957 state->base.destructor = anv_dynamic_vp_state_destroy;
1958
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001959 unsigned count = pCreateInfo->viewportAndScissorCount;
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001960 state->sf_clip_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001961 count * 64, 64);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001962 state->cc_vp = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001963 count * 8, 32);
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07001964 state->scissor = anv_state_pool_alloc(&device->dynamic_state_pool,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07001965 count * 32, 32);
1966
1967 for (uint32_t i = 0; i < pCreateInfo->viewportAndScissorCount; i++) {
1968 const VkViewport *vp = &pCreateInfo->pViewports[i];
1969 const VkRect *s = &pCreateInfo->pScissors[i];
1970
1971 struct GEN8_SF_CLIP_VIEWPORT sf_clip_viewport = {
1972 .ViewportMatrixElementm00 = vp->width / 2,
1973 .ViewportMatrixElementm11 = vp->height / 2,
1974 .ViewportMatrixElementm22 = (vp->maxDepth - vp->minDepth) / 2,
1975 .ViewportMatrixElementm30 = vp->originX + vp->width / 2,
1976 .ViewportMatrixElementm31 = vp->originY + vp->height / 2,
1977 .ViewportMatrixElementm32 = (vp->maxDepth + vp->minDepth) / 2,
1978 .XMinClipGuardband = -1.0f,
1979 .XMaxClipGuardband = 1.0f,
1980 .YMinClipGuardband = -1.0f,
1981 .YMaxClipGuardband = 1.0f,
1982 .XMinViewPort = vp->originX,
1983 .XMaxViewPort = vp->originX + vp->width - 1,
1984 .YMinViewPort = vp->originY,
1985 .YMaxViewPort = vp->originY + vp->height - 1,
1986 };
1987
1988 struct GEN8_CC_VIEWPORT cc_viewport = {
1989 .MinimumDepth = vp->minDepth,
1990 .MaximumDepth = vp->maxDepth
1991 };
1992
1993 /* Since xmax and ymax are inclusive, we have to have xmax < xmin or
1994 * ymax < ymin for empty clips. In case clip x, y, width height are all
1995 * 0, the clamps below produce 0 for xmin, ymin, xmax, ymax, which isn't
1996 * what we want. Just special case empty clips and produce a canonical
1997 * empty clip. */
1998 static const struct GEN8_SCISSOR_RECT empty_scissor = {
1999 .ScissorRectangleYMin = 1,
2000 .ScissorRectangleXMin = 1,
2001 .ScissorRectangleYMax = 0,
2002 .ScissorRectangleXMax = 0
2003 };
2004
2005 const int max = 0xffff;
2006 struct GEN8_SCISSOR_RECT scissor = {
2007 /* Do this math using int64_t so overflow gets clamped correctly. */
2008 .ScissorRectangleYMin = clamp_int64(s->offset.y, 0, max),
2009 .ScissorRectangleXMin = clamp_int64(s->offset.x, 0, max),
2010 .ScissorRectangleYMax = clamp_int64((uint64_t) s->offset.y + s->extent.height - 1, 0, max),
2011 .ScissorRectangleXMax = clamp_int64((uint64_t) s->offset.x + s->extent.width - 1, 0, max)
2012 };
2013
2014 GEN8_SF_CLIP_VIEWPORT_pack(NULL, state->sf_clip_vp.map + i * 64, &sf_clip_viewport);
2015 GEN8_CC_VIEWPORT_pack(NULL, state->cc_vp.map + i * 32, &cc_viewport);
2016
2017 if (s->extent.width <= 0 || s->extent.height <= 0) {
2018 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &empty_scissor);
2019 } else {
2020 GEN8_SCISSOR_RECT_pack(NULL, state->scissor.map + i * 32, &scissor);
2021 }
2022 }
2023
2024 *pState = (VkDynamicVpState) state;
2025
2026 return VK_SUCCESS;
2027}
2028
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002029VkResult anv_CreateDynamicRasterState(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002030 VkDevice _device,
2031 const VkDynamicRsStateCreateInfo* pCreateInfo,
2032 VkDynamicRsState* pState)
2033{
2034 struct anv_device *device = (struct anv_device *) _device;
2035 struct anv_dynamic_rs_state *state;
2036
2037 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_RS_STATE_CREATE_INFO);
2038
2039 state = anv_device_alloc(device, sizeof(*state), 8,
2040 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2041 if (state == NULL)
2042 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2043
2044 /* Missing these:
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002045 * float pointFadeThreshold;
2046 * // optional (GL45) - Size of point fade threshold
2047 */
2048
2049 struct GEN8_3DSTATE_SF sf = {
2050 GEN8_3DSTATE_SF_header,
2051 .LineWidth = pCreateInfo->lineWidth,
2052 .PointWidth = pCreateInfo->pointSize,
2053 };
2054
2055 GEN8_3DSTATE_SF_pack(NULL, state->state_sf, &sf);
2056
Kristian Høgsberg99883772015-05-26 09:40:10 -07002057 bool enable_bias = pCreateInfo->depthBias != 0.0f ||
2058 pCreateInfo->slopeScaledDepthBias != 0.0f;
2059 struct GEN8_3DSTATE_RASTER raster = {
2060 .GlobalDepthOffsetEnableSolid = enable_bias,
2061 .GlobalDepthOffsetEnableWireframe = enable_bias,
2062 .GlobalDepthOffsetEnablePoint = enable_bias,
2063 .GlobalDepthOffsetConstant = pCreateInfo->depthBias,
2064 .GlobalDepthOffsetScale = pCreateInfo->slopeScaledDepthBias,
2065 .GlobalDepthOffsetClamp = pCreateInfo->depthBiasClamp
2066 };
2067
2068 GEN8_3DSTATE_RASTER_pack(NULL, state->state_raster, &raster);
2069
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002070 *pState = (VkDynamicRsState) state;
2071
2072 return VK_SUCCESS;
2073}
2074
/* Create a dynamic color-blend state object.  Currently a near-stub: the
 * object is allocated and returned, but nothing from pCreateInfo is packed
 * into it yet (and binding such an object is a no-op -- see
 * anv_CmdBindDynamicStateObject).
 */
VkResult anv_CreateDynamicColorBlendState(
    VkDevice _device,
    const VkDynamicCbStateCreateInfo* pCreateInfo,
    VkDynamicCbState* pState)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_dynamic_cb_state *state;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_CB_STATE_CREATE_INFO);

   state = anv_device_alloc(device, sizeof(*state), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (state == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   *pState = (VkDynamicCbState) state;

   return VK_SUCCESS;
}
2094
/* Create a dynamic depth/stencil state object: pre-packs
 * 3DSTATE_WM_DEPTH_STENCIL with the stencil read/write masks from the
 * create info.  Note the same masks are applied to both front and back
 * faces, since this API only provides a single pair.
 */
VkResult anv_CreateDynamicDepthStencilState(
    VkDevice _device,
    const VkDynamicDsStateCreateInfo* pCreateInfo,
    VkDynamicDsState* pState)
{
   struct anv_device *device = (struct anv_device *) _device;
   struct anv_dynamic_ds_state *state;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DYNAMIC_DS_STATE_CREATE_INFO);

   state = anv_device_alloc(device, sizeof(*state), 8,
                            VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
   if (state == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
      GEN8_3DSTATE_WM_DEPTH_STENCIL_header,

      /* pCreateInfo->stencilFrontRef,
       * pCreateInfo->stencilBackRef,
       * go in cc state
       */

      /* NOTE(review): write-enable is derived from a non-zero write mask;
       * confirm this heuristic matches the intended hardware behavior.
       * (Was: "Is this what we need to do?") */
      .StencilBufferWriteEnable = pCreateInfo->stencilWriteMask != 0,

      .StencilTestMask = pCreateInfo->stencilReadMask,
      .StencilWriteMask = pCreateInfo->stencilWriteMask,

      .BackfaceStencilTestMask = pCreateInfo->stencilReadMask,
      .BackfaceStencilWriteMask = pCreateInfo->stencilWriteMask,
   };

   GEN8_3DSTATE_WM_DEPTH_STENCIL_pack(NULL, state->state_wm_depth_stencil,
                                      &wm_depth_stencil);

   *pState = (VkDynamicDsState) state;

   return VK_SUCCESS;
}
2135
2136// Command buffer functions
2137
/* Destructor hooked into the command buffer's base.destructor.  Tears down
 * everything anv_CreateCommandBuffer set up, in roughly reverse creation
 * order: surface bo mapping and handle, the three state streams, the batch,
 * the exec2 arrays, and finally the command buffer itself.
 */
static void
anv_cmd_buffer_destroy(struct anv_device *device,
                       struct anv_object *object,
                       VkObjectType obj_type)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) object;

   assert(obj_type == VK_OBJECT_TYPE_COMMAND_BUFFER);

   /* Unmap before closing the GEM handle that backs the mapping. */
   anv_gem_munmap(cmd_buffer->surface_bo.map, BATCH_SIZE);
   anv_gem_close(device, cmd_buffer->surface_bo.gem_handle);
   anv_state_stream_finish(&cmd_buffer->surface_state_stream);
   anv_state_stream_finish(&cmd_buffer->dynamic_state_stream);
   anv_state_stream_finish(&cmd_buffer->binding_table_state_stream);
   anv_batch_finish(&cmd_buffer->batch, device);
   anv_device_free(device, cmd_buffer->exec2_objects);
   anv_device_free(device, cmd_buffer->exec2_bos);
   anv_device_free(device, cmd_buffer);
}
2157
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002158VkResult anv_CreateCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002159 VkDevice _device,
2160 const VkCmdBufferCreateInfo* pCreateInfo,
2161 VkCmdBuffer* pCmdBuffer)
2162{
2163 struct anv_device *device = (struct anv_device *) _device;
2164 struct anv_cmd_buffer *cmd_buffer;
2165 VkResult result;
2166
2167 cmd_buffer = anv_device_alloc(device, sizeof(*cmd_buffer), 8,
2168 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2169 if (cmd_buffer == NULL)
2170 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2171
Jason Ekstrand57153da2015-05-22 15:15:08 -07002172 cmd_buffer->base.destructor = anv_cmd_buffer_destroy;
2173
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002174 cmd_buffer->device = device;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002175 cmd_buffer->rs_state = NULL;
2176 cmd_buffer->vp_state = NULL;
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002177 memset(&cmd_buffer->default_bindings, 0, sizeof(cmd_buffer->default_bindings));
2178 cmd_buffer->bindings = &cmd_buffer->default_bindings;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07002179
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002180 result = anv_batch_init(&cmd_buffer->batch, device);
2181 if (result != VK_SUCCESS)
2182 goto fail;
2183
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002184 result = anv_bo_init_new(&cmd_buffer->surface_bo, device, BATCH_SIZE);
2185 if (result != VK_SUCCESS)
2186 goto fail_batch;
2187
2188 cmd_buffer->surface_bo.map =
2189 anv_gem_mmap(device, cmd_buffer->surface_bo.gem_handle, 0, BATCH_SIZE);
2190 if (cmd_buffer->surface_bo.map == NULL) {
2191 result = vk_error(VK_ERROR_MEMORY_MAP_FAILED);
2192 goto fail_surface_bo;
2193 }
2194
2195 /* Start surface_next at 1 so surface offset 0 is invalid. */
2196 cmd_buffer->surface_next = 1;
2197 cmd_buffer->surface_relocs.num_relocs = 0;
2198
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002199 cmd_buffer->exec2_objects =
2200 anv_device_alloc(device, 8192 * sizeof(cmd_buffer->exec2_objects[0]), 8,
2201 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2202 if (cmd_buffer->exec2_objects == NULL) {
2203 result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002204 goto fail_surface_map;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002205 }
2206
2207 cmd_buffer->exec2_bos =
2208 anv_device_alloc(device, 8192 * sizeof(cmd_buffer->exec2_bos[0]), 8,
2209 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
2210 if (cmd_buffer->exec2_bos == NULL) {
2211 result = vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
2212 goto fail_exec2_objects;
2213 }
2214
Jason Ekstrand923691c2015-05-18 19:56:32 -07002215 anv_state_stream_init(&cmd_buffer->binding_table_state_stream,
2216 &device->binding_table_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002217 anv_state_stream_init(&cmd_buffer->surface_state_stream,
2218 &device->surface_state_block_pool);
Kristian Høgsberga1ec7892015-05-13 13:51:08 -07002219 anv_state_stream_init(&cmd_buffer->dynamic_state_stream,
Kristian Høgsberg0a775e12015-05-13 15:34:34 -07002220 &device->dynamic_state_block_pool);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002221
2222 cmd_buffer->dirty = 0;
2223 cmd_buffer->vb_dirty = 0;
Jason Ekstrandae8c93e2015-05-25 17:08:11 -07002224 cmd_buffer->pipeline = NULL;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002225
2226 *pCmdBuffer = (VkCmdBuffer) cmd_buffer;
2227
2228 return VK_SUCCESS;
2229
2230 fail_exec2_objects:
2231 anv_device_free(device, cmd_buffer->exec2_objects);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002232 fail_surface_map:
2233 anv_gem_munmap(cmd_buffer->surface_bo.map, BATCH_SIZE);
2234 fail_surface_bo:
2235 anv_gem_close(device, cmd_buffer->surface_bo.gem_handle);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002236 fail_batch:
2237 anv_batch_finish(&cmd_buffer->batch, device);
2238 fail:
2239 anv_device_free(device, cmd_buffer);
2240
2241 return result;
2242}
2243
/* Begin recording: emits the fixed prolog every command buffer needs --
 * 3D pipeline select, STATE_BASE_ADDRESS pointing at this command buffer's
 * surface bo and the device's dynamic/instruction pools, disables for the
 * unused tessellation/geometry/streamout stages, and a fixed push-constant
 * space partition.  pBeginInfo is currently unused.
 */
VkResult anv_BeginCommandBuffer(
    VkCmdBuffer cmdBuffer,
    const VkCmdBufferBeginInfo* pBeginInfo)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_device *device = cmd_buffer->device;

   anv_batch_emit(&cmd_buffer->batch, GEN8_PIPELINE_SELECT,
                  .PipelineSelection = _3D);
   anv_batch_emit(&cmd_buffer->batch, GEN8_STATE_SIP);

   /* Surface state lives in this command buffer's own surface bo; dynamic
    * and instruction state live in device-wide block pools. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_STATE_BASE_ADDRESS,
                  .GeneralStateBaseAddress = { NULL, 0 },
                  .GeneralStateMemoryObjectControlState = GEN8_MOCS,
                  .GeneralStateBaseAddressModifyEnable = true,
                  .GeneralStateBufferSize = 0xfffff,
                  .GeneralStateBufferSizeModifyEnable = true,

                  .SurfaceStateBaseAddress = { &cmd_buffer->surface_bo, 0 },
                  .SurfaceStateMemoryObjectControlState = GEN8_MOCS,
                  .SurfaceStateBaseAddressModifyEnable = true,

                  .DynamicStateBaseAddress = { &device->dynamic_state_block_pool.bo, 0 },
                  .DynamicStateMemoryObjectControlState = GEN8_MOCS,
                  .DynamicStateBaseAddressModifyEnable = true,
                  .DynamicStateBufferSize = 0xfffff,
                  .DynamicStateBufferSizeModifyEnable = true,

                  .IndirectObjectBaseAddress = { NULL, 0 },
                  .IndirectObjectMemoryObjectControlState = GEN8_MOCS,
                  .IndirectObjectBaseAddressModifyEnable = true,
                  .IndirectObjectBufferSize = 0xfffff,
                  .IndirectObjectBufferSizeModifyEnable = true,

                  .InstructionBaseAddress = { &device->instruction_block_pool.bo, 0 },
                  .InstructionMemoryObjectControlState = GEN8_MOCS,
                  .InstructionBaseAddressModifyEnable = true,
                  .InstructionBufferSize = 0xfffff,
                  .InstructionBuffersizeModifyEnable = true);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VF_STATISTICS,
                  .StatisticsEnable = true);
   /* Tessellation, geometry streamout are not supported yet; disable them. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_HS, .Enable = false);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_TE, .TEEnable = false);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DS, .FunctionEnable = false);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_STREAMOUT, .SOFunctionEnable = false);

   /* Fixed partition of the push constant space: 4KB-unit slices for VS,
    * GS and PS at offsets 0, 4 and 8. */
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_PUSH_CONSTANT_ALLOC_VS,
                  .ConstantBufferOffset = 0,
                  .ConstantBufferSize = 4);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_PUSH_CONSTANT_ALLOC_GS,
                  .ConstantBufferOffset = 4,
                  .ConstantBufferSize = 4);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_PUSH_CONSTANT_ALLOC_PS,
                  .ConstantBufferOffset = 8,
                  .ConstantBufferSize = 4);

   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_WM_CHROMAKEY,
                  .ChromaKeyKillEnable = false);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_SBE_SWIZ);
   anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_AA_LINE_PARAMETERS);

   return VK_SUCCESS;
}
2308
2309static void
2310anv_cmd_buffer_add_bo(struct anv_cmd_buffer *cmd_buffer,
2311 struct anv_bo *bo, struct anv_reloc_list *list)
2312{
2313 struct drm_i915_gem_exec_object2 *obj;
2314
2315 bo->index = cmd_buffer->bo_count;
2316 obj = &cmd_buffer->exec2_objects[bo->index];
2317 cmd_buffer->exec2_bos[bo->index] = bo;
2318 cmd_buffer->bo_count++;
2319
2320 obj->handle = bo->gem_handle;
2321 obj->relocation_count = 0;
2322 obj->relocs_ptr = 0;
2323 obj->alignment = 0;
2324 obj->offset = bo->offset;
2325 obj->flags = 0;
2326 obj->rsvd1 = 0;
2327 obj->rsvd2 = 0;
2328
2329 if (list) {
2330 obj->relocation_count = list->num_relocs;
2331 obj->relocs_ptr = (uintptr_t) list->relocs;
2332 }
2333}
2334
2335static void
2336anv_cmd_buffer_add_validate_bos(struct anv_cmd_buffer *cmd_buffer,
2337 struct anv_reloc_list *list)
2338{
2339 struct anv_bo *bo, *batch_bo;
2340
2341 batch_bo = &cmd_buffer->batch.bo;
2342 for (size_t i = 0; i < list->num_relocs; i++) {
2343 bo = list->reloc_bos[i];
2344 /* Skip any relocations targeting the batch bo. We need to make sure
2345 * it's the last in the list so we'll add it manually later.
2346 */
2347 if (bo == batch_bo)
2348 continue;
2349 if (bo->index < cmd_buffer->bo_count && cmd_buffer->exec2_bos[bo->index] == bo)
2350 continue;
2351
2352 anv_cmd_buffer_add_bo(cmd_buffer, bo, NULL);
2353 }
2354}
2355
2356static void
2357anv_cmd_buffer_process_relocs(struct anv_cmd_buffer *cmd_buffer,
2358 struct anv_reloc_list *list)
2359{
2360 struct anv_bo *bo;
2361
2362 /* If the kernel supports I915_EXEC_NO_RELOC, it will compare offset in
2363 * struct drm_i915_gem_exec_object2 against the bos current offset and if
2364 * all bos haven't moved it will skip relocation processing alltogether.
2365 * If I915_EXEC_NO_RELOC is not supported, the kernel ignores the incoming
2366 * value of offset so we can set it either way. For that to work we need
2367 * to make sure all relocs use the same presumed offset.
2368 */
2369
2370 for (size_t i = 0; i < list->num_relocs; i++) {
2371 bo = list->reloc_bos[i];
2372 if (bo->offset != list->relocs[i].presumed_offset)
2373 cmd_buffer->need_reloc = true;
2374
2375 list->relocs[i].target_handle = bo->index;
2376 }
2377}
2378
/* End recording: terminates and pads the batch, then builds the complete
 * execbuf2 submission (validation list, relocation fixups, flags) so a
 * later submit only has to call the ioctl.
 */
VkResult anv_EndCommandBuffer(
    VkCmdBuffer cmdBuffer)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_device *device = cmd_buffer->device;
   struct anv_batch *batch = &cmd_buffer->batch;

   anv_batch_emit(batch, GEN8_MI_BATCH_BUFFER_END);

   /* Round batch up to an even number of dwords.  (The byte length is
    * always a multiple of 4, so bit 2 being set means an odd dword count.)
    */
   if ((batch->next - batch->bo.map) & 4)
      anv_batch_emit(batch, GEN8_MI_NOOP);

   cmd_buffer->bo_count = 0;
   cmd_buffer->need_reloc = false;

   /* Lock for access to bo->index. */
   pthread_mutex_lock(&device->mutex);

   /* Add block pool bos first so we can add them with their relocs. */
   anv_cmd_buffer_add_bo(cmd_buffer, &cmd_buffer->surface_bo,
                         &cmd_buffer->surface_relocs);

   /* Pull in every bo the relocations target; the batch bo itself is added
    * last (anv_cmd_buffer_add_validate_bos skips it). */
   anv_cmd_buffer_add_validate_bos(cmd_buffer, &cmd_buffer->surface_relocs);
   anv_cmd_buffer_add_validate_bos(cmd_buffer, &batch->cmd_relocs);
   anv_cmd_buffer_add_bo(cmd_buffer, &batch->bo, &batch->cmd_relocs);
   anv_cmd_buffer_process_relocs(cmd_buffer, &cmd_buffer->surface_relocs);
   anv_cmd_buffer_process_relocs(cmd_buffer, &batch->cmd_relocs);

   cmd_buffer->execbuf.buffers_ptr = (uintptr_t) cmd_buffer->exec2_objects;
   cmd_buffer->execbuf.buffer_count = cmd_buffer->bo_count;
   cmd_buffer->execbuf.batch_start_offset = 0;
   cmd_buffer->execbuf.batch_len = batch->next - batch->bo.map;
   cmd_buffer->execbuf.cliprects_ptr = 0;
   cmd_buffer->execbuf.num_cliprects = 0;
   cmd_buffer->execbuf.DR1 = 0;
   cmd_buffer->execbuf.DR4 = 0;

   /* HANDLE_LUT lets relocs refer to bos by validation-list index; NO_RELOC
    * is only safe when every presumed offset matched above. */
   cmd_buffer->execbuf.flags = I915_EXEC_HANDLE_LUT;
   if (!cmd_buffer->need_reloc)
      cmd_buffer->execbuf.flags |= I915_EXEC_NO_RELOC;
   cmd_buffer->execbuf.flags |= I915_EXEC_RENDER;
   cmd_buffer->execbuf.rsvd1 = device->context_id;
   cmd_buffer->execbuf.rsvd2 = 0;

   pthread_mutex_unlock(&device->mutex);

   return VK_SUCCESS;
}
2428
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002429VkResult anv_ResetCommandBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002430 VkCmdBuffer cmdBuffer)
2431{
2432 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2433
2434 anv_batch_reset(&cmd_buffer->batch);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002435 cmd_buffer->surface_next = 0;
2436 cmd_buffer->surface_relocs.num_relocs = 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002437
2438 return VK_SUCCESS;
2439}
2440
2441// Command buffer building functions
2442
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002443void anv_CmdBindPipeline(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002444 VkCmdBuffer cmdBuffer,
2445 VkPipelineBindPoint pipelineBindPoint,
2446 VkPipeline _pipeline)
2447{
2448 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002449 struct anv_pipeline *pipeline = (struct anv_pipeline *) _pipeline;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002450
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002451 cmd_buffer->pipeline = pipeline;
2452 cmd_buffer->vb_dirty |= pipeline->vb_used;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002453 cmd_buffer->dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
2454}
2455
/* Bind a dynamic state object.  Viewport state is emitted into the batch
 * immediately; raster and depth/stencil state are stashed on the command
 * buffer and flagged dirty for lazy emission; color-blend state is ignored
 * (the objects carry no packed data yet -- see
 * anv_CreateDynamicColorBlendState).
 */
void anv_CmdBindDynamicStateObject(
    VkCmdBuffer cmdBuffer,
    VkStateBindPoint stateBindPoint,
    VkDynamicStateObject dynamicState)
{
   struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
   struct anv_dynamic_vp_state *vp_state;

   switch (stateBindPoint) {
   case VK_STATE_BIND_POINT_VIEWPORT:
      vp_state = (struct anv_dynamic_vp_state *) dynamicState;
      /* We emit state immediately, but set cmd_buffer->vp_state to indicate
       * that vp state has been set in this command buffer. */
      cmd_buffer->vp_state = vp_state;
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_SCISSOR_STATE_POINTERS,
                     .ScissorRectPointer = vp_state->scissor.offset);
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_CC,
                     .CCViewportPointer = vp_state->cc_vp.offset);
      anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_VIEWPORT_STATE_POINTERS_SF_CLIP,
                     .SFClipViewportPointer = vp_state->sf_clip_vp.offset);
      break;
   case VK_STATE_BIND_POINT_RASTER:
      /* Lazily emitted when ANV_CMD_BUFFER_RS_DIRTY is flushed. */
      cmd_buffer->rs_state = (struct anv_dynamic_rs_state *) dynamicState;
      cmd_buffer->dirty |= ANV_CMD_BUFFER_RS_DIRTY;
      break;
   case VK_STATE_BIND_POINT_COLOR_BLEND:
      /* No-op: color-blend state objects contain no packed state yet. */
      break;
   case VK_STATE_BIND_POINT_DEPTH_STENCIL:
      /* Lazily emitted when ANV_CMD_BUFFER_DS_DIRTY is flushed. */
      cmd_buffer->ds_state = (struct anv_dynamic_ds_state *) dynamicState;
      cmd_buffer->dirty |= ANV_CMD_BUFFER_DS_DIRTY;
      break;
   default:
      /* Silently ignore unknown bind points. */
      break;
   };
}
2491
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002492static struct anv_state
2493anv_cmd_buffer_alloc_surface_state(struct anv_cmd_buffer *cmd_buffer,
2494 uint32_t size, uint32_t alignment)
2495{
2496 struct anv_state state;
2497
2498 state.offset = ALIGN_U32(cmd_buffer->surface_next, alignment);
2499 state.map = cmd_buffer->surface_bo.map + state.offset;
2500 state.alloc_size = size;
2501 cmd_buffer->surface_next = state.offset + size;
2502
2503 assert(state.offset + size < cmd_buffer->surface_bo.size);
2504
2505 return state;
2506}
2507
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002508void anv_CmdBindDescriptorSets(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002509 VkCmdBuffer cmdBuffer,
2510 VkPipelineBindPoint pipelineBindPoint,
2511 uint32_t firstSet,
2512 uint32_t setCount,
2513 const VkDescriptorSet* pDescriptorSets,
2514 uint32_t dynamicOffsetCount,
2515 const uint32_t* pDynamicOffsets)
2516{
2517 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002518 struct anv_pipeline_layout *layout = cmd_buffer->pipeline->layout;
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002519 struct anv_bindings *bindings = cmd_buffer->bindings;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002520
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002521 uint32_t offset = 0;
2522 for (uint32_t i = 0; i < setCount; i++) {
2523 struct anv_descriptor_set *set =
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002524 (struct anv_descriptor_set *) pDescriptorSets[i];
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002525 struct anv_descriptor_set_layout *set_layout = layout->set[firstSet + i].layout;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002526
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002527 for (uint32_t s = 0; s < VK_NUM_SHADER_STAGE; s++) {
2528 uint32_t *surface_to_desc = set_layout->stage[s].surface_start;
2529 uint32_t *sampler_to_desc = set_layout->stage[s].sampler_start;
2530 uint32_t bias = s == VK_SHADER_STAGE_FRAGMENT ? MAX_RTS : 0;
2531 uint32_t start;
2532
2533 start = bias + layout->set[firstSet + i].surface_start[s];
2534 for (uint32_t b = 0; b < set_layout->stage[s].surface_count; b++) {
2535 struct anv_surface_view *view = set->descriptors[surface_to_desc[b]].view;
Jason Ekstrand0a547512015-05-21 16:33:04 -07002536 if (!view)
2537 continue;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002538
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002539 struct anv_state state =
2540 anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);
2541 memcpy(state.map, view->surface_state.map, 64);
2542
Jason Ekstrand519fe762015-05-21 15:55:27 -07002543 /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
2544 *(uint64_t *)(state.map + 8 * 4) =
2545 anv_reloc_list_add(&cmd_buffer->surface_relocs,
2546 state.offset + 8 * 4,
2547 view->bo, view->offset);
2548
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002549 bindings->descriptors[s].surfaces[start + b] = state.offset;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002550 }
2551
2552 start = layout->set[firstSet + i].sampler_start[s];
2553 for (uint32_t b = 0; b < set_layout->stage[s].sampler_count; b++) {
2554 struct anv_sampler *sampler = set->descriptors[sampler_to_desc[b]].sampler;
Jason Ekstrand0a547512015-05-21 16:33:04 -07002555 if (!sampler)
2556 continue;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002557
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002558 memcpy(&bindings->descriptors[s].samplers[start + b],
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002559 sampler->state, sizeof(sampler->state));
2560 }
2561 }
2562
2563 offset += layout->set[firstSet + i].layout->num_dynamic_buffers;
2564 }
2565
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002566 cmd_buffer->dirty |= ANV_CMD_BUFFER_DESCRIPTOR_SET_DIRTY;
2567}
2568
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002569void anv_CmdBindIndexBuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002570 VkCmdBuffer cmdBuffer,
2571 VkBuffer _buffer,
2572 VkDeviceSize offset,
2573 VkIndexType indexType)
2574{
2575 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2576 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
2577
2578 static const uint32_t vk_to_gen_index_type[] = {
2579 [VK_INDEX_TYPE_UINT8] = INDEX_BYTE,
2580 [VK_INDEX_TYPE_UINT16] = INDEX_WORD,
2581 [VK_INDEX_TYPE_UINT32] = INDEX_DWORD,
2582 };
2583
2584 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_INDEX_BUFFER,
2585 .IndexFormat = vk_to_gen_index_type[indexType],
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002586 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002587 .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002588 .BufferSize = buffer->size - offset);
2589}
2590
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002591void anv_CmdBindVertexBuffers(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002592 VkCmdBuffer cmdBuffer,
2593 uint32_t startBinding,
2594 uint32_t bindingCount,
2595 const VkBuffer* pBuffers,
2596 const VkDeviceSize* pOffsets)
2597{
2598 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002599 struct anv_bindings *bindings = cmd_buffer->bindings;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002600
2601 /* We have to defer setting up vertex buffer since we need the buffer
2602 * stride from the pipeline. */
2603
2604 for (uint32_t i = 0; i < bindingCount; i++) {
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002605 bindings->vb[startBinding + i].buffer = (struct anv_buffer *) pBuffers[i];
2606 bindings->vb[startBinding + i].offset = pOffsets[i];
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002607 cmd_buffer->vb_dirty |= 1 << (startBinding + i);
2608 }
2609}
2610
2611static void
2612flush_descriptor_sets(struct anv_cmd_buffer *cmd_buffer)
2613{
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002614 struct anv_pipeline_layout *layout = cmd_buffer->pipeline->layout;
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002615 struct anv_bindings *bindings = cmd_buffer->bindings;
2616 uint32_t layers = cmd_buffer->framebuffer->layers;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002617
2618 for (uint32_t s = 0; s < VK_NUM_SHADER_STAGE; s++) {
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002619 uint32_t bias;
2620
2621 if (s == VK_SHADER_STAGE_FRAGMENT) {
2622 bias = MAX_RTS;
2623 layers = cmd_buffer->framebuffer->layers;
2624 } else {
2625 bias = 0;
2626 layers = 0;
2627 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002628
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002629 /* This is a little awkward: layout can be NULL but we still have to
2630 * allocate and set a binding table for the PS stage for render
2631 * targets. */
Jason Ekstrand4223de72015-05-16 10:23:09 -07002632 uint32_t surface_count = layout ? layout->stage[s].surface_count : 0;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002633
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002634 if (layers + surface_count > 0) {
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002635 struct anv_state state;
2636 uint32_t size;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002637
Jason Ekstrand057bef82015-05-16 10:42:51 -07002638 size = (bias + surface_count) * sizeof(uint32_t);
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002639 state = anv_cmd_buffer_alloc_surface_state(cmd_buffer, size, 32);
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002640 memcpy(state.map, bindings->descriptors[s].surfaces, size);
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002641
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002642 static const uint32_t binding_table_opcodes[] = {
2643 [VK_SHADER_STAGE_VERTEX] = 38,
2644 [VK_SHADER_STAGE_TESS_CONTROL] = 39,
2645 [VK_SHADER_STAGE_TESS_EVALUATION] = 40,
2646 [VK_SHADER_STAGE_GEOMETRY] = 41,
2647 [VK_SHADER_STAGE_FRAGMENT] = 42,
2648 [VK_SHADER_STAGE_COMPUTE] = 0,
2649 };
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002650
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07002651 anv_batch_emit(&cmd_buffer->batch,
2652 GEN8_3DSTATE_BINDING_TABLE_POINTERS_VS,
2653 ._3DCommandSubOpcode = binding_table_opcodes[s],
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002654 .PointertoVSBindingTable = state.offset);
2655 }
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07002656
2657 if (layout && layout->stage[s].sampler_count > 0) {
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002658 struct anv_state state;
2659 size_t size;
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07002660
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002661 size = layout->stage[s].sampler_count * 16;
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07002662 state = anv_cmd_buffer_alloc_surface_state(cmd_buffer, size, 32);
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002663 memcpy(state.map, bindings->descriptors[s].samplers, size);
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07002664
2665 static const uint32_t sampler_state_opcodes[] = {
2666 [VK_SHADER_STAGE_VERTEX] = 43,
2667 [VK_SHADER_STAGE_TESS_CONTROL] = 44, /* HS */
2668 [VK_SHADER_STAGE_TESS_EVALUATION] = 45, /* DS */
2669 [VK_SHADER_STAGE_GEOMETRY] = 46,
2670 [VK_SHADER_STAGE_FRAGMENT] = 47,
2671 [VK_SHADER_STAGE_COMPUTE] = 0,
2672 };
2673
2674 anv_batch_emit(&cmd_buffer->batch,
2675 GEN8_3DSTATE_SAMPLER_STATE_POINTERS_VS,
2676 ._3DCommandSubOpcode = sampler_state_opcodes[s],
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07002677 .PointertoVSSamplerState = state.offset);
Kristian Høgsberg83c7e1f2015-05-13 14:43:08 -07002678 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002679 }
2680}
2681
2682static void
2683anv_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
2684{
2685 struct anv_pipeline *pipeline = cmd_buffer->pipeline;
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002686 struct anv_bindings *bindings = cmd_buffer->bindings;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002687 uint32_t *p;
2688
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002689 uint32_t vb_emit = cmd_buffer->vb_dirty & pipeline->vb_used;
2690 const uint32_t num_buffers = __builtin_popcount(vb_emit);
2691 const uint32_t num_dwords = 1 + num_buffers * 4;
2692
2693 if (vb_emit) {
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002694 p = anv_batch_emitn(&cmd_buffer->batch, num_dwords,
2695 GEN8_3DSTATE_VERTEX_BUFFERS);
2696 uint32_t vb, i = 0;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002697 for_each_bit(vb, vb_emit) {
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07002698 struct anv_buffer *buffer = bindings->vb[vb].buffer;
2699 uint32_t offset = bindings->vb[vb].offset;
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002700
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002701 struct GEN8_VERTEX_BUFFER_STATE state = {
2702 .VertexBufferIndex = vb,
Kristian Høgsberg0997a7b2015-05-21 14:35:34 -07002703 .MemoryObjectControlState = GEN8_MOCS,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002704 .AddressModifyEnable = true,
2705 .BufferPitch = pipeline->binding_stride[vb],
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002706 .BufferStartingAddress = { buffer->bo, buffer->offset + offset },
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002707 .BufferSize = buffer->size - offset
2708 };
2709
2710 GEN8_VERTEX_BUFFER_STATE_pack(&cmd_buffer->batch, &p[1 + i * 4], &state);
2711 i++;
2712 }
2713 }
2714
2715 if (cmd_buffer->dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
2716 anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);
2717
2718 if (cmd_buffer->dirty & ANV_CMD_BUFFER_DESCRIPTOR_SET_DIRTY)
2719 flush_descriptor_sets(cmd_buffer);
2720
Kristian Høgsberg99883772015-05-26 09:40:10 -07002721 if (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_RS_DIRTY)) {
Kristian Høgsberg55b9b702015-05-11 22:23:38 -07002722 anv_batch_emit_merge(&cmd_buffer->batch,
2723 cmd_buffer->rs_state->state_sf, pipeline->state_sf);
Kristian Høgsberg99883772015-05-26 09:40:10 -07002724 anv_batch_emit_merge(&cmd_buffer->batch,
2725 cmd_buffer->rs_state->state_raster, pipeline->state_raster);
2726 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002727
Kristian Høgsbergcbe7ed42015-05-24 21:19:26 -07002728 if (cmd_buffer->ds_state &&
2729 (cmd_buffer->dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY | ANV_CMD_BUFFER_DS_DIRTY)))
2730 anv_batch_emit_merge(&cmd_buffer->batch,
2731 cmd_buffer->ds_state->state_wm_depth_stencil,
2732 pipeline->state_wm_depth_stencil);
2733
Jason Ekstrand0f0b5ae2015-05-21 16:49:55 -07002734 cmd_buffer->vb_dirty &= ~vb_emit;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002735 cmd_buffer->dirty = 0;
2736}
2737
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002738void anv_CmdDraw(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002739 VkCmdBuffer cmdBuffer,
2740 uint32_t firstVertex,
2741 uint32_t vertexCount,
2742 uint32_t firstInstance,
2743 uint32_t instanceCount)
2744{
2745 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2746
2747 anv_cmd_buffer_flush_state(cmd_buffer);
2748
2749 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
2750 .VertexAccessType = SEQUENTIAL,
2751 .VertexCountPerInstance = vertexCount,
2752 .StartVertexLocation = firstVertex,
2753 .InstanceCount = instanceCount,
2754 .StartInstanceLocation = firstInstance,
2755 .BaseVertexLocation = 0);
2756}
2757
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002758void anv_CmdDrawIndexed(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002759 VkCmdBuffer cmdBuffer,
2760 uint32_t firstIndex,
2761 uint32_t indexCount,
2762 int32_t vertexOffset,
2763 uint32_t firstInstance,
2764 uint32_t instanceCount)
2765{
2766 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2767
2768 anv_cmd_buffer_flush_state(cmd_buffer);
2769
2770 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
2771 .VertexAccessType = RANDOM,
2772 .VertexCountPerInstance = indexCount,
2773 .StartVertexLocation = firstIndex,
2774 .InstanceCount = instanceCount,
2775 .StartInstanceLocation = firstInstance,
2776 .BaseVertexLocation = 0);
2777}
2778
2779static void
2780anv_batch_lrm(struct anv_batch *batch,
2781 uint32_t reg, struct anv_bo *bo, uint32_t offset)
2782{
2783 anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_MEM,
2784 .RegisterAddress = reg,
2785 .MemoryAddress = { bo, offset });
2786}
2787
2788static void
2789anv_batch_lri(struct anv_batch *batch, uint32_t reg, uint32_t imm)
2790{
2791 anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_IMM,
2792 .RegisterOffset = reg,
2793 .DataDWord = imm);
2794}
2795
/* Auto-Draw / Indirect Registers */
/* MMIO addresses of the 3DPRIMITIVE auto-draw parameter registers; the
 * indirect draw paths load them with MI_LOAD_REGISTER_MEM/IMM before
 * emitting 3DPRIMITIVE with IndirectParameterEnable set. */
#define GEN7_3DPRIM_END_OFFSET          0x2420
#define GEN7_3DPRIM_START_VERTEX        0x2430
#define GEN7_3DPRIM_VERTEX_COUNT        0x2434
#define GEN7_3DPRIM_INSTANCE_COUNT      0x2438
#define GEN7_3DPRIM_START_INSTANCE      0x243C
#define GEN7_3DPRIM_BASE_VERTEX         0x2440
2803
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002804void anv_CmdDrawIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002805 VkCmdBuffer cmdBuffer,
2806 VkBuffer _buffer,
2807 VkDeviceSize offset,
2808 uint32_t count,
2809 uint32_t stride)
2810{
2811 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2812 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002813 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002814 uint32_t bo_offset = buffer->offset + offset;
2815
2816 anv_cmd_buffer_flush_state(cmd_buffer);
2817
2818 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
2819 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
2820 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
2821 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 12);
2822 anv_batch_lri(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, 0);
2823
2824 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
2825 .IndirectParameterEnable = true,
2826 .VertexAccessType = SEQUENTIAL);
2827}
2828
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002829void anv_CmdDrawIndexedIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002830 VkCmdBuffer cmdBuffer,
2831 VkBuffer _buffer,
2832 VkDeviceSize offset,
2833 uint32_t count,
2834 uint32_t stride)
2835{
2836 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2837 struct anv_buffer *buffer = (struct anv_buffer *) _buffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002838 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002839 uint32_t bo_offset = buffer->offset + offset;
2840
2841 anv_cmd_buffer_flush_state(cmd_buffer);
2842
2843 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_VERTEX_COUNT, bo, bo_offset);
2844 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_INSTANCE_COUNT, bo, bo_offset + 4);
2845 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_VERTEX, bo, bo_offset + 8);
2846 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_BASE_VERTEX, bo, bo_offset + 12);
2847 anv_batch_lrm(&cmd_buffer->batch, GEN7_3DPRIM_START_INSTANCE, bo, bo_offset + 16);
2848
2849 anv_batch_emit(&cmd_buffer->batch, GEN8_3DPRIMITIVE,
2850 .IndirectParameterEnable = true,
2851 .VertexAccessType = RANDOM);
2852}
2853
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002854void anv_CmdDispatch(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002855 VkCmdBuffer cmdBuffer,
2856 uint32_t x,
2857 uint32_t y,
2858 uint32_t z)
2859{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07002860 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002861}
2862
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002863void anv_CmdDispatchIndirect(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002864 VkCmdBuffer cmdBuffer,
2865 VkBuffer buffer,
2866 VkDeviceSize offset)
2867{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07002868 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002869}
2870
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002871void anv_CmdSetEvent(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002872 VkCmdBuffer cmdBuffer,
2873 VkEvent event,
2874 VkPipeEvent pipeEvent)
2875{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07002876 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002877}
2878
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002879void anv_CmdResetEvent(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002880 VkCmdBuffer cmdBuffer,
2881 VkEvent event,
2882 VkPipeEvent pipeEvent)
2883{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07002884 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002885}
2886
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002887void anv_CmdWaitEvents(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002888 VkCmdBuffer cmdBuffer,
2889 VkWaitEvent waitEvent,
2890 uint32_t eventCount,
2891 const VkEvent* pEvents,
2892 uint32_t memBarrierCount,
2893 const void** ppMemBarriers)
2894{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07002895 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002896}
2897
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002898void anv_CmdPipelineBarrier(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002899 VkCmdBuffer cmdBuffer,
2900 VkWaitEvent waitEvent,
2901 uint32_t pipeEventCount,
2902 const VkPipeEvent* pPipeEvents,
2903 uint32_t memBarrierCount,
2904 const void** ppMemBarriers)
2905{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07002906 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002907}
2908
/* Emit a PIPE_CONTROL that writes the current PS depth count (passed
 * depth/stencil test sample count) to `bo` at `offset`; used to bracket
 * occlusion queries. */
static void
anv_batch_emit_ps_depth_count(struct anv_batch *batch,
                              struct anv_bo *bo, uint32_t offset)
{
   anv_batch_emit(batch, GEN8_PIPE_CONTROL,
                  .DestinationAddressType = DAT_PPGTT,
                  .PostSyncOperation = WritePSDepthCount,
                  .Address = { bo, offset });  /* FIXME: This is only lower 32 bits */
}
2918
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002919void anv_CmdBeginQuery(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002920 VkCmdBuffer cmdBuffer,
2921 VkQueryPool queryPool,
2922 uint32_t slot,
2923 VkQueryControlFlags flags)
2924{
2925 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2926 struct anv_query_pool *pool = (struct anv_query_pool *) queryPool;
2927
2928 switch (pool->type) {
2929 case VK_QUERY_TYPE_OCCLUSION:
Kristian Høgsberg82ddab42015-05-18 17:03:58 -07002930 anv_batch_emit_ps_depth_count(&cmd_buffer->batch, &pool->bo,
2931 slot * sizeof(struct anv_query_pool_slot));
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002932 break;
2933
2934 case VK_QUERY_TYPE_PIPELINE_STATISTICS:
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002935 default:
Kristian Høgsberg82ddab42015-05-18 17:03:58 -07002936 unreachable("");
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002937 }
2938}
2939
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002940void anv_CmdEndQuery(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002941 VkCmdBuffer cmdBuffer,
2942 VkQueryPool queryPool,
2943 uint32_t slot)
2944{
2945 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2946 struct anv_query_pool *pool = (struct anv_query_pool *) queryPool;
2947
2948 switch (pool->type) {
2949 case VK_QUERY_TYPE_OCCLUSION:
Kristian Høgsberg82ddab42015-05-18 17:03:58 -07002950 anv_batch_emit_ps_depth_count(&cmd_buffer->batch, &pool->bo,
2951 slot * sizeof(struct anv_query_pool_slot) + 8);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002952 break;
2953
2954 case VK_QUERY_TYPE_PIPELINE_STATISTICS:
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002955 default:
Kristian Høgsberg82ddab42015-05-18 17:03:58 -07002956 unreachable("");
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002957 }
2958}
2959
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002960void anv_CmdResetQueryPool(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002961 VkCmdBuffer cmdBuffer,
2962 VkQueryPool queryPool,
2963 uint32_t startQuery,
2964 uint32_t queryCount)
2965{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07002966 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002967}
2968
Kristian Høgsbergae9ac472015-05-18 17:04:32 -07002969#define TIMESTAMP 0x2358
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002970
Kristian Høgsberg454345d2015-05-17 16:33:48 -07002971void anv_CmdWriteTimestamp(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002972 VkCmdBuffer cmdBuffer,
2973 VkTimestampType timestampType,
2974 VkBuffer destBuffer,
2975 VkDeviceSize destOffset)
2976{
2977 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
2978 struct anv_buffer *buffer = (struct anv_buffer *) destBuffer;
Kristian Høgsberg099faa12015-05-11 22:19:58 -07002979 struct anv_bo *bo = buffer->bo;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002980
2981 switch (timestampType) {
2982 case VK_TIMESTAMP_TYPE_TOP:
2983 anv_batch_emit(&cmd_buffer->batch, GEN8_MI_STORE_REGISTER_MEM,
2984 .RegisterAddress = TIMESTAMP,
2985 .MemoryAddress = { bo, buffer->offset + destOffset });
Kristian Høgsbergae9ac472015-05-18 17:04:32 -07002986 anv_batch_emit(&cmd_buffer->batch, GEN8_MI_STORE_REGISTER_MEM,
2987 .RegisterAddress = TIMESTAMP + 4,
2988 .MemoryAddress = { bo, buffer->offset + destOffset + 4 });
Kristian Høgsberg769785c2015-05-08 22:32:37 -07002989 break;
2990
2991 case VK_TIMESTAMP_TYPE_BOTTOM:
2992 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
2993 .DestinationAddressType = DAT_PPGTT,
2994 .PostSyncOperation = WriteTimestamp,
2995 .Address = /* FIXME: This is only lower 32 bits */
2996 { bo, buffer->offset + destOffset });
2997 break;
2998
2999 default:
3000 break;
3001 }
3002}
3003
Kristian Høgsberg82ddab42015-05-18 17:03:58 -07003004#define alu_opcode(v) __gen_field((v), 20, 31)
3005#define alu_operand1(v) __gen_field((v), 10, 19)
3006#define alu_operand2(v) __gen_field((v), 0, 9)
3007#define alu(opcode, operand1, operand2) \
3008 alu_opcode(opcode) | alu_operand1(operand1) | alu_operand2(operand2)
3009
3010#define OPCODE_NOOP 0x000
3011#define OPCODE_LOAD 0x080
3012#define OPCODE_LOADINV 0x480
3013#define OPCODE_LOAD0 0x081
3014#define OPCODE_LOAD1 0x481
3015#define OPCODE_ADD 0x100
3016#define OPCODE_SUB 0x101
3017#define OPCODE_AND 0x102
3018#define OPCODE_OR 0x103
3019#define OPCODE_XOR 0x104
3020#define OPCODE_STORE 0x180
3021#define OPCODE_STOREINV 0x580
3022
3023#define OPERAND_R0 0x00
3024#define OPERAND_R1 0x01
3025#define OPERAND_R2 0x02
3026#define OPERAND_R3 0x03
3027#define OPERAND_R4 0x04
3028#define OPERAND_SRCA 0x20
3029#define OPERAND_SRCB 0x21
3030#define OPERAND_ACCU 0x31
3031#define OPERAND_ZF 0x32
3032#define OPERAND_CF 0x33
3033
3034#define CS_GPR(n) (0x2600 + (n) * 8)
3035
3036static void
3037emit_load_alu_reg_u64(struct anv_batch *batch, uint32_t reg,
3038 struct anv_bo *bo, uint32_t offset)
3039{
3040 anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_MEM,
3041 .RegisterAddress = reg,
3042 .MemoryAddress = { bo, offset });
3043 anv_batch_emit(batch, GEN8_MI_LOAD_REGISTER_MEM,
3044 .RegisterAddress = reg + 4,
3045 .MemoryAddress = { bo, offset + 4 });
3046}
3047
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003048void anv_CmdCopyQueryPoolResults(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003049 VkCmdBuffer cmdBuffer,
3050 VkQueryPool queryPool,
3051 uint32_t startQuery,
3052 uint32_t queryCount,
3053 VkBuffer destBuffer,
3054 VkDeviceSize destOffset,
3055 VkDeviceSize destStride,
3056 VkQueryResultFlags flags)
3057{
Kristian Høgsberg82ddab42015-05-18 17:03:58 -07003058 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3059 struct anv_query_pool *pool = (struct anv_query_pool *) queryPool;
3060 struct anv_buffer *buffer = (struct anv_buffer *) destBuffer;
3061 uint32_t slot_offset, dst_offset;
3062
3063 if (flags & VK_QUERY_RESULT_WITH_AVAILABILITY_BIT) {
3064 /* Where is the availabilty info supposed to go? */
3065 anv_finishme("VK_QUERY_RESULT_WITH_AVAILABILITY_BIT");
3066 return;
3067 }
3068
3069 assert(pool->type == VK_QUERY_TYPE_OCCLUSION);
3070
3071 /* FIXME: If we're not waiting, should we just do this on the CPU? */
3072 if (flags & VK_QUERY_RESULT_WAIT_BIT)
3073 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
3074 .CommandStreamerStallEnable = true);
3075
3076 dst_offset = buffer->offset + destOffset;
3077 for (uint32_t i = 0; i < queryCount; i++) {
3078
3079 slot_offset = (startQuery + i) * sizeof(struct anv_query_pool_slot);
3080
3081 emit_load_alu_reg_u64(&cmd_buffer->batch, CS_GPR(0), &pool->bo, slot_offset);
3082 emit_load_alu_reg_u64(&cmd_buffer->batch, CS_GPR(1), &pool->bo, slot_offset + 8);
3083
3084 /* FIXME: We need to clamp the result for 32 bit. */
3085
3086 uint32_t *dw = anv_batch_emitn(&cmd_buffer->batch, 5, GEN8_MI_MATH);
3087 dw[1] = alu(OPCODE_LOAD, OPERAND_SRCA, OPERAND_R1);
3088 dw[2] = alu(OPCODE_LOAD, OPERAND_SRCB, OPERAND_R0);
3089 dw[3] = alu(OPCODE_SUB, 0, 0);
3090 dw[4] = alu(OPCODE_STORE, OPERAND_R2, OPERAND_ACCU);
3091
3092 anv_batch_emit(&cmd_buffer->batch, GEN8_MI_STORE_REGISTER_MEM,
3093 .RegisterAddress = CS_GPR(2),
3094 /* FIXME: This is only lower 32 bits */
3095 .MemoryAddress = { buffer->bo, dst_offset });
3096
3097 if (flags & VK_QUERY_RESULT_64_BIT)
3098 anv_batch_emit(&cmd_buffer->batch, GEN8_MI_STORE_REGISTER_MEM,
3099 .RegisterAddress = CS_GPR(2) + 4,
3100 /* FIXME: This is only lower 32 bits */
3101 .MemoryAddress = { buffer->bo, dst_offset + 4 });
3102
3103 dst_offset += destStride;
3104 }
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003105}
3106
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003107void anv_CmdInitAtomicCounters(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003108 VkCmdBuffer cmdBuffer,
3109 VkPipelineBindPoint pipelineBindPoint,
3110 uint32_t startCounter,
3111 uint32_t counterCount,
3112 const uint32_t* pData)
3113{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003114 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003115}
3116
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003117void anv_CmdLoadAtomicCounters(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003118 VkCmdBuffer cmdBuffer,
3119 VkPipelineBindPoint pipelineBindPoint,
3120 uint32_t startCounter,
3121 uint32_t counterCount,
3122 VkBuffer srcBuffer,
3123 VkDeviceSize srcOffset)
3124{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003125 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003126}
3127
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003128void anv_CmdSaveAtomicCounters(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003129 VkCmdBuffer cmdBuffer,
3130 VkPipelineBindPoint pipelineBindPoint,
3131 uint32_t startCounter,
3132 uint32_t counterCount,
3133 VkBuffer destBuffer,
3134 VkDeviceSize destOffset)
3135{
Jason Ekstrandffe9f602015-05-12 13:44:43 -07003136 stub();
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003137}
3138
Jason Ekstrand57153da2015-05-22 15:15:08 -07003139static void
3140anv_framebuffer_destroy(struct anv_device *device,
3141 struct anv_object *object,
3142 VkObjectType obj_type)
3143{
3144 struct anv_framebuffer *fb = (struct anv_framebuffer *)object;
3145
3146 assert(obj_type == VK_OBJECT_TYPE_FRAMEBUFFER);
3147
3148 anv_DestroyObject((VkDevice) device,
3149 VK_OBJECT_TYPE_DYNAMIC_VP_STATE,
3150 fb->vp_state);
3151
3152 anv_device_free(device, fb);
3153}
3154
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003155VkResult anv_CreateFramebuffer(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003156 VkDevice _device,
3157 const VkFramebufferCreateInfo* pCreateInfo,
3158 VkFramebuffer* pFramebuffer)
3159{
3160 struct anv_device *device = (struct anv_device *) _device;
3161 struct anv_framebuffer *framebuffer;
3162
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003163 static const struct anv_depth_stencil_view null_view =
3164 { .depth_format = D16_UNORM, .depth_stride = 0, .stencil_stride = 0 };
3165
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003166 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
3167
3168 framebuffer = anv_device_alloc(device, sizeof(*framebuffer), 8,
3169 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
3170 if (framebuffer == NULL)
3171 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
3172
Jason Ekstrand57153da2015-05-22 15:15:08 -07003173 framebuffer->base.destructor = anv_framebuffer_destroy;
3174
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003175 framebuffer->color_attachment_count = pCreateInfo->colorAttachmentCount;
3176 for (uint32_t i = 0; i < pCreateInfo->colorAttachmentCount; i++) {
3177 framebuffer->color_attachments[i] =
Kristian Høgsbergf5b0f132015-05-13 15:31:26 -07003178 (struct anv_surface_view *) pCreateInfo->pColorAttachments[i].view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003179 }
3180
3181 if (pCreateInfo->pDepthStencilAttachment) {
3182 framebuffer->depth_stencil =
3183 (struct anv_depth_stencil_view *) pCreateInfo->pDepthStencilAttachment->view;
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003184 } else {
3185 framebuffer->depth_stencil = &null_view;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003186 }
3187
3188 framebuffer->sample_count = pCreateInfo->sampleCount;
3189 framebuffer->width = pCreateInfo->width;
3190 framebuffer->height = pCreateInfo->height;
3191 framebuffer->layers = pCreateInfo->layers;
3192
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003193 vkCreateDynamicViewportState((VkDevice) device,
3194 &(VkDynamicVpStateCreateInfo) {
3195 .sType = VK_STRUCTURE_TYPE_DYNAMIC_VP_STATE_CREATE_INFO,
Jason Ekstrand912944e2015-05-25 17:03:57 -07003196 .viewportAndScissorCount = 1,
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003197 .pViewports = (VkViewport[]) {
3198 {
3199 .originX = 0,
3200 .originY = 0,
3201 .width = pCreateInfo->width,
3202 .height = pCreateInfo->height,
3203 .minDepth = 0,
3204 .maxDepth = 1
3205 },
3206 },
3207 .pScissors = (VkRect[]) {
3208 { { 0, 0 },
3209 { pCreateInfo->width, pCreateInfo->height } },
3210 }
3211 },
3212 &framebuffer->vp_state);
3213
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003214 *pFramebuffer = (VkFramebuffer) framebuffer;
3215
3216 return VK_SUCCESS;
3217}
3218
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003219VkResult anv_CreateRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003220 VkDevice _device,
3221 const VkRenderPassCreateInfo* pCreateInfo,
3222 VkRenderPass* pRenderPass)
3223{
3224 struct anv_device *device = (struct anv_device *) _device;
3225 struct anv_render_pass *pass;
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003226 size_t size;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003227
3228 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);
3229
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003230 size = sizeof(*pass) +
3231 pCreateInfo->layers * sizeof(struct anv_render_pass_layer);
3232 pass = anv_device_alloc(device, size, 8,
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003233 VK_SYSTEM_ALLOC_TYPE_API_OBJECT);
3234 if (pass == NULL)
3235 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
3236
3237 pass->render_area = pCreateInfo->renderArea;
3238
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003239 pass->num_layers = pCreateInfo->layers;
3240
3241 pass->num_clear_layers = 0;
3242 for (uint32_t i = 0; i < pCreateInfo->layers; i++) {
3243 pass->layers[i].color_load_op = pCreateInfo->pColorLoadOps[i];
3244 pass->layers[i].clear_color = pCreateInfo->pColorLoadClearValues[i];
3245 if (pass->layers[i].color_load_op == VK_ATTACHMENT_LOAD_OP_CLEAR)
3246 pass->num_clear_layers++;
3247 }
3248
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003249 *pRenderPass = (VkRenderPass) pass;
3250
3251 return VK_SUCCESS;
3252}
3253
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07003254void
3255anv_cmd_buffer_fill_render_targets(struct anv_cmd_buffer *cmd_buffer)
3256{
3257 struct anv_framebuffer *framebuffer = cmd_buffer->framebuffer;
3258 struct anv_bindings *bindings = cmd_buffer->bindings;
3259
3260 for (uint32_t i = 0; i < framebuffer->color_attachment_count; i++) {
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003261 const struct anv_surface_view *view = framebuffer->color_attachments[i];
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07003262
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07003263 struct anv_state state =
3264 anv_cmd_buffer_alloc_surface_state(cmd_buffer, 64, 64);
3265 memcpy(state.map, view->surface_state.map, 64);
3266
Jason Ekstrand519fe762015-05-21 15:55:27 -07003267 /* The address goes in dwords 8 and 9 of the SURFACE_STATE */
3268 *(uint64_t *)(state.map + 8 * 4) =
3269 anv_reloc_list_add(&cmd_buffer->surface_relocs,
3270 state.offset + 8 * 4,
3271 view->bo, view->offset);
3272
Kristian Høgsberga1bd4262015-05-19 14:14:24 -07003273 bindings->descriptors[VK_SHADER_STAGE_FRAGMENT].surfaces[i] = state.offset;
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07003274 }
3275 cmd_buffer->dirty |= ANV_CMD_BUFFER_DESCRIPTOR_SET_DIRTY;
3276}
3277
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003278static void
3279anv_cmd_buffer_emit_depth_stencil(struct anv_cmd_buffer *cmd_buffer,
3280 struct anv_render_pass *pass)
3281{
3282 const struct anv_depth_stencil_view *view =
3283 cmd_buffer->framebuffer->depth_stencil;
3284
3285 /* FIXME: Implement the PMA stall W/A */
3286
3287 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DEPTH_BUFFER,
3288 .SurfaceType = SURFTYPE_2D,
3289 .DepthWriteEnable = view->depth_stride > 0,
3290 .StencilWriteEnable = view->stencil_stride > 0,
3291 .HierarchicalDepthBufferEnable = false,
3292 .SurfaceFormat = view->depth_format,
3293 .SurfacePitch = view->depth_stride > 0 ? view->depth_stride - 1 : 0,
3294 .SurfaceBaseAddress = { view->bo, view->depth_offset },
3295 .Height = pass->render_area.extent.height - 1,
3296 .Width = pass->render_area.extent.width - 1,
3297 .LOD = 0,
3298 .Depth = 1 - 1,
3299 .MinimumArrayElement = 0,
3300 .DepthBufferObjectControlState = GEN8_MOCS,
3301 .RenderTargetViewExtent = 1 - 1,
3302 .SurfaceQPitch = 0);
3303
3304 /* Disable hierarchial depth buffers. */
3305 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_HIER_DEPTH_BUFFER);
3306
3307 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_STENCIL_BUFFER,
3308 .StencilBufferEnable = view->stencil_stride > 0,
3309 .StencilBufferObjectControlState = GEN8_MOCS,
3310 .SurfacePitch = view->stencil_stride > 0 ? view->stencil_stride - 1 : 0,
3311 .SurfaceBaseAddress = { view->bo, view->stencil_offset },
3312 .SurfaceQPitch = 0);
3313
3314 /* Clear the clear params. */
3315 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_CLEAR_PARAMS);
3316}
3317
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003318void anv_CmdBeginRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003319 VkCmdBuffer cmdBuffer,
3320 const VkRenderPassBegin* pRenderPassBegin)
3321{
3322 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *) cmdBuffer;
3323 struct anv_render_pass *pass = (struct anv_render_pass *) pRenderPassBegin->renderPass;
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003324 struct anv_framebuffer *framebuffer =
3325 (struct anv_framebuffer *) pRenderPassBegin->framebuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003326
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003327 cmd_buffer->framebuffer = framebuffer;
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003328
3329 anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_DRAWING_RECTANGLE,
3330 .ClippedDrawingRectangleYMin = pass->render_area.offset.y,
3331 .ClippedDrawingRectangleXMin = pass->render_area.offset.x,
3332 .ClippedDrawingRectangleYMax =
3333 pass->render_area.offset.y + pass->render_area.extent.height - 1,
3334 .ClippedDrawingRectangleXMax =
3335 pass->render_area.offset.x + pass->render_area.extent.width - 1,
3336 .DrawingRectangleOriginY = 0,
3337 .DrawingRectangleOriginX = 0);
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003338
Kristian Høgsberg3b9f32e2015-05-15 17:03:18 -07003339 anv_cmd_buffer_fill_render_targets(cmd_buffer);
Kristian Høgsbergbf096c92015-05-15 15:03:21 -07003340
Kristian Høgsberg37743f92015-05-22 22:59:12 -07003341 anv_cmd_buffer_emit_depth_stencil(cmd_buffer, pass);
3342
Kristian Høgsbergd77c34d2015-05-11 23:25:06 -07003343 anv_cmd_buffer_clear(cmd_buffer, pass);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003344}
3345
Kristian Høgsberg454345d2015-05-17 16:33:48 -07003346void anv_CmdEndRenderPass(
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003347 VkCmdBuffer cmdBuffer,
3348 VkRenderPass renderPass)
3349{
Jason Ekstranda1309c52015-05-13 22:13:05 -07003350 /* Emit a flushing pipe control at the end of a pass. This is kind of a
3351 * hack but it ensures that render targets always actually get written.
3352 * Eventually, we should do flushing based on image format transitions
3353 * or something of that nature.
3354 */
3355 struct anv_cmd_buffer *cmd_buffer = (struct anv_cmd_buffer *)cmdBuffer;
3356 anv_batch_emit(&cmd_buffer->batch, GEN8_PIPE_CONTROL,
3357 .PostSyncOperation = NoWrite,
3358 .RenderTargetCacheFlushEnable = true,
3359 .InstructionCacheInvalidateEnable = true,
3360 .DepthCacheFlushEnable = true,
3361 .VFCacheInvalidationEnable = true,
3362 .TextureCacheInvalidationEnable = true,
3363 .CommandStreamerStallEnable = true);
Kristian Høgsberg769785c2015-05-08 22:32:37 -07003364}
Kristian Høgsbergf8866472015-05-15 22:04:15 -07003365
/* Forward declarations for the debug-marker entry points defined below.
 * These are declared with default symbol visibility so they are exported
 * from the driver shared object and can be resolved directly by loaders.
 */
void vkCmdDbgMarkerBegin(
    VkCmdBuffer cmdBuffer,
    const char* pMarker)
   __attribute__ ((visibility ("default")));

void vkCmdDbgMarkerEnd(
    VkCmdBuffer cmdBuffer)
   __attribute__ ((visibility ("default")));

VkResult vkDbgSetObjectTag(
    VkDevice device,
    VkObject object,
    size_t tagSize,
    const void* pTag)
   __attribute__ ((visibility ("default")));
3381
3382
/* No-op stub: debug markers are accepted but not recorded. */
void vkCmdDbgMarkerBegin(
    VkCmdBuffer cmdBuffer,
    const char* pMarker)
{
}
3388
/* No-op stub: debug markers are accepted but not recorded. */
void vkCmdDbgMarkerEnd(
    VkCmdBuffer cmdBuffer)
{
}
3393
/* No-op stub: the tag is ignored and success is always reported. */
VkResult vkDbgSetObjectTag(
    VkDevice device,
    VkObject object,
    size_t tagSize,
    const void* pTag)
{
   return VK_SUCCESS;
}