/*
 * XGL
 *
 * Copyright (C) 2015 LunarG, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *   Chia-I Wu <olv@lunarg.com>
 */

#include "buf.h"
#include "cmd.h"
#include "dev.h"
#include "gpu.h"
#include "img.h"
#include "sampler.h"
#include "view.h"
#include "desc.h"

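/*
 * Descriptors live in two flat, CPU-side arrays: one for surface state
 * (buffer and image views) and one for sampler state.  The structs below
 * are the per-descriptor payloads; a struct intel_desc_offset (desc.h)
 * names a position as a pair of byte offsets, one into each array.
 */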
enum intel_desc_surface_type {
    INTEL_DESC_SURFACE_UNUSED,
    INTEL_DESC_SURFACE_BUF,
    INTEL_DESC_SURFACE_IMG,
};

struct intel_desc_surface {
    const struct intel_mem *mem;
    bool read_only;

    enum intel_desc_surface_type type;
    union {
        const void *unused;
        const struct intel_buf_view *buf;
        const struct intel_img_view *img;
    } u;
};

struct intel_desc_sampler {
    const struct intel_sampler *sampler;
};

static bool desc_pool_init_desc_sizes(struct intel_desc_pool *pool,
                                      const struct intel_gpu *gpu)
{
    pool->surface_desc_size = sizeof(struct intel_desc_surface);
    pool->sampler_desc_size = sizeof(struct intel_desc_sampler);

    return true;
}

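/*
 * Create the device-global descriptor pool.  The capacity is a fixed
 * 16384 surface and 16384 sampler descriptors; regions and sets are
 * carved out of this one pool.
 */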
XGL_RESULT intel_desc_pool_create(struct intel_dev *dev,
                                  struct intel_desc_pool **pool_ret)
{
    const uint32_t surface_count = 16384;
    const uint32_t sampler_count = 16384;
    struct intel_desc_pool *pool;

    pool = icd_alloc(sizeof(*pool), 0, XGL_SYSTEM_ALLOC_INTERNAL);
    if (!pool)
        return XGL_ERROR_OUT_OF_MEMORY;

    memset(pool, 0, sizeof(*pool));

    if (!desc_pool_init_desc_sizes(pool, dev->gpu)) {
        icd_free(pool);
        return XGL_ERROR_UNKNOWN;
    }

    intel_desc_offset_set(&pool->size,
                          pool->surface_desc_size * surface_count,
                          pool->sampler_desc_size * sampler_count);

    pool->surfaces = icd_alloc(pool->size.surface,
                               64, XGL_SYSTEM_ALLOC_INTERNAL);
    if (!pool->surfaces) {
        icd_free(pool);
        return XGL_ERROR_OUT_OF_MEMORY;
    }

    pool->samplers = icd_alloc(pool->size.sampler,
                               64, XGL_SYSTEM_ALLOC_INTERNAL);
    if (!pool->samplers) {
        icd_free(pool->surfaces);
        icd_free(pool);
        return XGL_ERROR_OUT_OF_MEMORY;
    }

    *pool_ret = pool;

    return XGL_SUCCESS;
}

void intel_desc_pool_destroy(struct intel_desc_pool *pool)
{
    icd_free(pool->samplers);
    icd_free(pool->surfaces);
    icd_free(pool);
}

/**
 * Get the size of a descriptor in the pool.
 */
static XGL_RESULT desc_pool_get_desc_size(const struct intel_desc_pool *pool,
                                          XGL_DESCRIPTOR_TYPE type,
                                          struct intel_desc_offset *size)
{
    uint32_t surface_size = 0, sampler_size = 0;

    switch (type) {
    case XGL_DESCRIPTOR_TYPE_SAMPLER:
        sampler_size = pool->sampler_desc_size;
        break;
    case XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE:
        surface_size = pool->surface_desc_size;
        sampler_size = pool->sampler_desc_size;
        break;
    case XGL_DESCRIPTOR_TYPE_TEXTURE:
    case XGL_DESCRIPTOR_TYPE_TEXTURE_BUFFER:
    case XGL_DESCRIPTOR_TYPE_IMAGE:
    case XGL_DESCRIPTOR_TYPE_IMAGE_BUFFER:
    case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
    case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER:
    case XGL_DESCRIPTOR_TYPE_RAW_BUFFER:
    case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
    case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER_DYNAMIC:
    case XGL_DESCRIPTOR_TYPE_RAW_BUFFER_DYNAMIC:
        surface_size = pool->surface_desc_size;
        break;
    default:
        assert(!"unknown descriptor type");
        return XGL_ERROR_INVALID_VALUE;
    }

    intel_desc_offset_set(size, surface_size, sampler_size);

    return XGL_SUCCESS;
}

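/*
 * Reserve a [begin, end) range of descriptors large enough for a region
 * described by info.  This is a bump allocator: pool->cur only moves
 * forward, and intel_desc_pool_free() below does not reclaim.
 */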
XGL_RESULT intel_desc_pool_alloc(struct intel_desc_pool *pool,
                                 const XGL_DESCRIPTOR_REGION_CREATE_INFO *info,
                                 struct intel_desc_offset *begin,
                                 struct intel_desc_offset *end)
{
    uint32_t surface_size = 0, sampler_size = 0;
    struct intel_desc_offset alloc;
    uint32_t i;

    /* calculate the sizes needed */
    for (i = 0; i < info->count; i++) {
        const XGL_DESCRIPTOR_TYPE_COUNT *tc = &info->pTypeCount[i];
        struct intel_desc_offset size;
        XGL_RESULT ret;

        ret = desc_pool_get_desc_size(pool, tc->type, &size);
        if (ret != XGL_SUCCESS)
            return ret;

        surface_size += size.surface * tc->count;
        sampler_size += size.sampler * tc->count;
    }

    intel_desc_offset_set(&alloc, surface_size, sampler_size);

    *begin = pool->cur;
    intel_desc_offset_add(end, &pool->cur, &alloc);

    if (!intel_desc_offset_within(end, &pool->size))
        return XGL_ERROR_OUT_OF_MEMORY;

    /* advance the write pointer */
    pool->cur = *end;

    return XGL_SUCCESS;
}

static void desc_pool_validate_begin_end(const struct intel_desc_pool *pool,
                                         const struct intel_desc_offset *begin,
                                         const struct intel_desc_offset *end)
{
    assert(begin->surface % pool->surface_desc_size == 0 &&
           begin->sampler % pool->sampler_desc_size == 0);
    assert(end->surface % pool->surface_desc_size == 0 &&
           end->sampler % pool->sampler_desc_size == 0);
    assert(intel_desc_offset_within(end, &pool->size));
}

void intel_desc_pool_free(struct intel_desc_pool *pool,
                          const struct intel_desc_offset *begin,
                          const struct intel_desc_offset *end)
{
    desc_pool_validate_begin_end(pool, begin, end);

    /* no reclamation: the bump allocator never reuses freed ranges */
}

XGL_RESULT intel_desc_pool_begin_update(struct intel_desc_pool *pool,
                                        XGL_DESCRIPTOR_UPDATE_MODE mode)
{
    /* no-op */
    return XGL_SUCCESS;
}

XGL_RESULT intel_desc_pool_end_update(struct intel_desc_pool *pool,
                                      struct intel_cmd *cmd)
{
    /* no pipelined update; cmd_draw() will do the work */
    return XGL_SUCCESS;
}

void intel_desc_pool_clear(struct intel_desc_pool *pool,
                           const struct intel_desc_offset *begin,
                           const struct intel_desc_offset *end)
{
    uint32_t i;

    desc_pool_validate_begin_end(pool, begin, end);

    for (i = begin->surface; i < end->surface; i += pool->surface_desc_size) {
        struct intel_desc_surface *desc = (struct intel_desc_surface *)
            ((char *) pool->surfaces + i);

        desc->mem = NULL;
        desc->type = INTEL_DESC_SURFACE_UNUSED;
        desc->u.unused = NULL;
    }

    for (i = begin->sampler; i < end->sampler; i += pool->sampler_desc_size) {
        struct intel_desc_sampler *desc = (struct intel_desc_sampler *)
            ((char *) pool->samplers + i);

        desc->sampler = NULL;
    }
}

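/*
 * Copy descriptor payloads into [begin, end).  The surface and sampler
 * halves are written independently; a NULL source is fine when the
 * corresponding half of the range is empty.
 */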
void intel_desc_pool_update(struct intel_desc_pool *pool,
                            const struct intel_desc_offset *begin,
                            const struct intel_desc_offset *end,
                            const struct intel_desc_surface *surfaces,
                            const struct intel_desc_sampler *samplers)
{
    desc_pool_validate_begin_end(pool, begin, end);

    if (begin->surface < end->surface) {
        memcpy((char *) pool->surfaces + begin->surface, surfaces,
               end->surface - begin->surface);
    }

    if (begin->sampler < end->sampler) {
        memcpy((char *) pool->samplers + begin->sampler, samplers,
               end->sampler - begin->sampler);
    }
}

void intel_desc_pool_copy(struct intel_desc_pool *pool,
                          const struct intel_desc_offset *begin,
                          const struct intel_desc_offset *end,
                          const struct intel_desc_offset *src)
{
    struct intel_desc_offset src_end;
    const struct intel_desc_surface *surfaces;
    const struct intel_desc_sampler *samplers;

    /* no overlap */
    assert(intel_desc_offset_within(src, begin) ||
           intel_desc_offset_within(end, src));

    /* no read past the end of the pool */
    intel_desc_offset_sub(&src_end, end, begin);
    intel_desc_offset_add(&src_end, src, &src_end);
    assert(intel_desc_offset_within(&src_end, &pool->size));

    surfaces = (const struct intel_desc_surface *)
        ((const char *) pool->surfaces + src->surface);
    samplers = (const struct intel_desc_sampler *)
        ((const char *) pool->samplers + src->sampler);

    intel_desc_pool_update(pool, begin, end, surfaces, samplers);
}

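/*
 * Read back the surface descriptor at offset: the memory it references,
 * whether the binding is read-only, and the hardware state words
 * (cmd/cmd_len) recorded in the view.  Buffer views keep a separate
 * command variant for the fragment stage.
 */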
void intel_desc_pool_read_surface(const struct intel_desc_pool *pool,
                                  const struct intel_desc_offset *offset,
                                  XGL_PIPELINE_SHADER_STAGE stage,
                                  const struct intel_mem **mem,
                                  bool *read_only,
                                  const uint32_t **cmd,
                                  uint32_t *cmd_len)
{
    const struct intel_desc_surface *desc;
    struct intel_desc_offset end;

    intel_desc_offset_set(&end,
                          offset->surface + pool->surface_desc_size,
                          offset->sampler);
    desc_pool_validate_begin_end(pool, offset, &end);

    desc = (const struct intel_desc_surface *)
        ((const char *) pool->surfaces + offset->surface);

    *mem = desc->mem;
    *read_only = desc->read_only;
    switch (desc->type) {
    case INTEL_DESC_SURFACE_BUF:
        *cmd = (stage == XGL_SHADER_STAGE_FRAGMENT) ?
            desc->u.buf->fs_cmd : desc->u.buf->cmd;
        *cmd_len = desc->u.buf->cmd_len;
        break;
    case INTEL_DESC_SURFACE_IMG:
        *cmd = desc->u.img->cmd;
        *cmd_len = desc->u.img->cmd_len;
        break;
    case INTEL_DESC_SURFACE_UNUSED:
    default:
        *cmd = NULL;
        *cmd_len = 0;
        break;
    }
}

void intel_desc_pool_read_sampler(const struct intel_desc_pool *pool,
                                  const struct intel_desc_offset *offset,
                                  const struct intel_sampler **sampler)
{
    const struct intel_desc_sampler *desc;
    struct intel_desc_offset end;

    intel_desc_offset_set(&end,
                          offset->surface,
                          offset->sampler + pool->sampler_desc_size);
    desc_pool_validate_begin_end(pool, offset, &end);

    desc = (const struct intel_desc_sampler *)
        ((const char *) pool->samplers + offset->sampler);

    *sampler = desc->sampler;
}

static void desc_region_destroy(struct intel_obj *obj)
{
    struct intel_desc_region *region = intel_desc_region_from_obj(obj);

    intel_desc_region_destroy(region);
}

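/*
 * An XGL descriptor region is a sub-range of the device pool, with its
 * own write pointer for descriptor set allocation.
 */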
XGL_RESULT intel_desc_region_create(struct intel_dev *dev,
                                    XGL_DESCRIPTOR_REGION_USAGE usage,
                                    uint32_t max_sets,
                                    const XGL_DESCRIPTOR_REGION_CREATE_INFO *info,
                                    struct intel_desc_region **region_ret)
{
    struct intel_desc_region *region;
    XGL_RESULT ret;

    region = (struct intel_desc_region *)
        intel_base_create(dev, sizeof(*region), dev->base.dbg,
                          XGL_DBG_OBJECT_DESCRIPTOR_REGION, info, 0);
    if (!region)
        return XGL_ERROR_OUT_OF_MEMORY;

    region->dev = dev;

    ret = intel_desc_pool_alloc(dev->desc_pool, info,
                                &region->pool_begin, &region->pool_end);
    if (ret != XGL_SUCCESS) {
        intel_base_destroy(&region->obj.base);
        return ret;
    }

    /* point to head */
    region->cur = region->pool_begin;

    region->obj.destroy = desc_region_destroy;

    *region_ret = region;

    return XGL_SUCCESS;
}

void intel_desc_region_destroy(struct intel_desc_region *region)
{
    intel_desc_pool_free(region->dev->desc_pool,
                         &region->pool_begin, &region->pool_end);
    intel_base_destroy(&region->obj.base);
}

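/*
 * Carve a descriptor set's worth of space out of the region, using the
 * same bump-pointer scheme as the pool.  Individual sets are never
 * reclaimed; intel_desc_region_free_all() resets the whole region.
 */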
XGL_RESULT intel_desc_region_alloc(struct intel_desc_region *region,
                                   const struct intel_desc_layout *layout,
                                   struct intel_desc_offset *begin,
                                   struct intel_desc_offset *end)
{
    *begin = region->cur;
    intel_desc_offset_add(end, &region->cur, &layout->pool_size);

    if (!intel_desc_offset_within(end, &region->pool_end))
        return XGL_ERROR_OUT_OF_MEMORY;

    /* advance the write pointer */
    region->cur = *end;

    return XGL_SUCCESS;
}

void intel_desc_region_free_all(struct intel_desc_region *region)
{
    /* reset to head */
    region->cur = region->pool_begin;
}

static void desc_set_destroy(struct intel_obj *obj)
{
    struct intel_desc_set *set = intel_desc_set_from_obj(obj);

    intel_desc_set_destroy(set);
}

XGL_RESULT intel_desc_set_create(struct intel_dev *dev,
                                 struct intel_desc_region *region,
                                 XGL_DESCRIPTOR_SET_USAGE usage,
                                 const struct intel_desc_layout *layout,
                                 struct intel_desc_set **set_ret)
{
    struct intel_desc_set *set;
    XGL_RESULT ret;

    set = (struct intel_desc_set *)
        intel_base_create(dev, sizeof(*set), dev->base.dbg,
                          XGL_DBG_OBJECT_DESCRIPTOR_SET, NULL, 0);
    if (!set)
        return XGL_ERROR_OUT_OF_MEMORY;

    set->pool = dev->desc_pool;
    ret = intel_desc_region_alloc(region, layout,
                                  &set->pool_begin, &set->pool_end);
    if (ret != XGL_SUCCESS) {
        intel_base_destroy(&set->obj.base);
        return ret;
    }

    set->layout = layout;

    set->obj.destroy = desc_set_destroy;

    *set_ret = set;

    return XGL_SUCCESS;
}

void intel_desc_set_destroy(struct intel_desc_set *set)
{
    intel_base_destroy(&set->obj.base);
}

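/*
 * Write one descriptor at the iterator's current position.  The layout
 * iterator yields set-relative offsets, which are rebased onto the
 * set's range in the pool here.
 */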
static void desc_set_update(struct intel_desc_set *set,
                            const struct intel_desc_layout_iter *iter,
                            const struct intel_desc_surface *surfaces,
                            const struct intel_desc_sampler *samplers)
{
    struct intel_desc_offset begin, end;

    intel_desc_offset_add(&begin, &set->pool_begin, &iter->offset_begin);
    intel_desc_offset_add(&end, &set->pool_begin, &iter->offset_end);

    intel_desc_pool_update(set->pool, &begin, &end, surfaces, samplers);
}

static bool desc_set_img_layout_read_only(XGL_IMAGE_LAYOUT layout)
{
    switch (layout) {
    case XGL_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
    case XGL_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
    case XGL_IMAGE_LAYOUT_TRANSFER_SOURCE_OPTIMAL:
        return true;
    default:
        return false;
    }
}

void intel_desc_set_update_samplers(struct intel_desc_set *set,
                                    const XGL_UPDATE_SAMPLERS *update)
{
    struct intel_desc_layout_iter iter;
    uint32_t i;

    if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
        return;

    for (i = 0; i < update->count; i++) {
        const struct intel_sampler *sampler =
            intel_sampler((XGL_SAMPLER) update->pSamplers[i]);
        struct intel_desc_sampler desc;

        if (iter.type != XGL_DESCRIPTOR_TYPE_SAMPLER)
            break;

        desc.sampler = sampler;
        desc_set_update(set, &iter, NULL, &desc);

        if (!intel_desc_layout_advance_iter(set->layout, &iter))
            break;
    }
}

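/*
 * Update combined sampler/texture descriptors.  For a range created
 * with an immutable sampler, the immutable sampler is written once for
 * the whole range and the application-provided sampler is effectively
 * ignored: the range's increment has a zero sampler component (see
 * desc_layout_init_ranges), so the per-descriptor update only advances
 * the surface half.
 */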
void intel_desc_set_update_sampler_textures(struct intel_desc_set *set,
                                            const XGL_UPDATE_SAMPLER_TEXTURES *update)
{
    struct intel_desc_layout_iter iter;
    const struct intel_sampler *immutable_sampler = NULL;
    uint32_t i;

    if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
        return;

    for (i = 0; i < update->count; i++) {
        const struct intel_sampler *sampler =
            intel_sampler(update->pSamplerImageViews[i].pSampler);
        const XGL_IMAGE_VIEW_ATTACH_INFO *info =
            update->pSamplerImageViews[i].pImageView;
        const struct intel_img_view *view = intel_img_view(info->view);
        struct intel_desc_surface view_desc;
        struct intel_desc_sampler sampler_desc;

        if (iter.type != XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE)
            return;

        /* update every immutable sampler once */
        if (immutable_sampler != iter.range->immutable_sampler) {
            immutable_sampler = iter.range->immutable_sampler;

            if (immutable_sampler) {
                struct intel_desc_offset begin, end;

                intel_desc_offset_add(&begin, &set->pool_begin,
                                      &iter.offset_begin);
                intel_desc_offset_set(&end, begin.surface,
                                      begin.sampler +
                                      set->pool->sampler_desc_size);

                sampler_desc.sampler = immutable_sampler;
                intel_desc_pool_update(set->pool, &begin, &end,
                                       NULL, &sampler_desc);
            }
        }

        view_desc.mem = view->img->obj.mem;
        view_desc.read_only = desc_set_img_layout_read_only(info->layout);
        view_desc.type = INTEL_DESC_SURFACE_IMG;
        view_desc.u.img = view;

        sampler_desc.sampler = sampler;

        desc_set_update(set, &iter, &view_desc, &sampler_desc);

        if (!intel_desc_layout_advance_iter(set->layout, &iter))
            break;
    }
}

void intel_desc_set_update_images(struct intel_desc_set *set,
                                  const XGL_UPDATE_IMAGES *update)
{
    struct intel_desc_layout_iter iter;
    uint32_t i;

    if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
        return;

    for (i = 0; i < update->count; i++) {
        const XGL_IMAGE_VIEW_ATTACH_INFO *info = update->pImageViews[i];
        const struct intel_img_view *view = intel_img_view(info->view);
        struct intel_desc_surface desc;

        if (iter.type != update->descriptorType)
            break;

        desc.mem = view->img->obj.mem;
        desc.read_only = desc_set_img_layout_read_only(info->layout);
        desc.type = INTEL_DESC_SURFACE_IMG;
        desc.u.img = view;
        desc_set_update(set, &iter, &desc, NULL);

        if (!intel_desc_layout_advance_iter(set->layout, &iter))
            break;
    }
}

void intel_desc_set_update_buffers(struct intel_desc_set *set,
                                   const XGL_UPDATE_BUFFERS *update)
{
    struct intel_desc_layout_iter iter;
    uint32_t i;

    if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
        return;

    for (i = 0; i < update->count; i++) {
        const XGL_BUFFER_VIEW_ATTACH_INFO *info = update->pBufferViews[i];
        const struct intel_buf_view *view = intel_buf_view(info->view);
        struct intel_desc_surface desc;

        if (iter.type != update->descriptorType)
            break;

        desc.mem = view->buf->obj.mem;
        desc.read_only = false;
        desc.type = INTEL_DESC_SURFACE_BUF;
        desc.u.buf = view;
        desc_set_update(set, &iter, &desc, NULL);

        if (!intel_desc_layout_advance_iter(set->layout, &iter))
            break;
    }
}

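/*
 * Copy descriptors from another set.  Both layouts are walked first to
 * validate the ranges; the actual copy is a descriptor-sized memcpy
 * within the pool.
 */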
void intel_desc_set_update_as_copy(struct intel_desc_set *set,
                                   const XGL_UPDATE_AS_COPY *update)
{
    const struct intel_desc_set *src_set =
        intel_desc_set(update->descriptorSet);
    struct intel_desc_layout_iter iter, src_iter;
    struct intel_desc_offset begin, end, src_begin;
    uint32_t i;

    /* disallow combined sampler textures */
    if (update->descriptorType == XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE)
        return;

    /* XGL_UPDATE_AS_COPY provides no destination index; start at zero */
    if (!intel_desc_layout_find_index(set->layout, 0, &iter))
        return;
    if (!intel_desc_layout_find_index(src_set->layout,
                                      update->descriptorIndex, &src_iter))
        return;

    intel_desc_offset_add(&begin, &set->pool_begin, &iter.offset_begin);
    intel_desc_offset_add(&src_begin, &src_set->pool_begin,
                          &src_iter.offset_begin);

    /* advance both iterators past the descriptors to be copied */
    for (i = 0; i < update->count; i++) {
        if (iter.type != update->descriptorType ||
            src_iter.type != update->descriptorType ||
            !intel_desc_layout_advance_iter(set->layout, &iter) ||
            !intel_desc_layout_advance_iter(src_set->layout, &src_iter))
            break;
    }
    if (i < update->count)
        return;

    /* both iterators now sit one descriptor past the copied range */
    intel_desc_offset_add(&end, &set->pool_begin, &iter.offset_begin);

    intel_desc_pool_copy(set->pool, &begin, &end, &src_begin);
}

static void desc_layout_destroy(struct intel_obj *obj)
{
    struct intel_desc_layout *layout = intel_desc_layout_from_obj(obj);

    intel_desc_layout_destroy(layout);
}

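/*
 * Walk the XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO chain once to count
 * the ranges (and dynamic buffer descriptors), then allocate the range
 * array.
 */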
static XGL_RESULT desc_layout_alloc_ranges(struct intel_desc_layout *layout,
                                           const struct intel_desc_pool *pool,
                                           const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info)
{
    /* calculate counts */
    while (info) {
        if (info->sType !=
            XGL_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO)
            return XGL_ERROR_INVALID_POINTER;

        switch (info->descriptorType) {
        case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
        case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER_DYNAMIC:
        case XGL_DESCRIPTOR_TYPE_RAW_BUFFER_DYNAMIC:
            layout->dynamic_desc_count += info->count;
            break;
        default:
            break;
        }

        layout->range_count++;
        info = info->pNext;
    }

    layout->ranges = icd_alloc(sizeof(layout->ranges[0]) *
                               layout->range_count, 0,
                               XGL_SYSTEM_ALLOC_INTERNAL);
    if (!layout->ranges)
        return XGL_ERROR_OUT_OF_MEMORY;

    memset(layout->ranges, 0,
           sizeof(layout->ranges[0]) * layout->range_count);

    return XGL_SUCCESS;
}

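/*
 * Assign each range its descriptor index interval and pool offsets.
 * Indices and offsets continue from the prior layout, so a chained
 * layout addresses its descriptors globally across the whole chain.
 * A range with an immutable sampler advances only its surface offset
 * per descriptor; all descriptors in the range share one sampler slot.
 */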
static XGL_RESULT desc_layout_init_ranges(struct intel_desc_layout *layout,
                                          const struct intel_desc_pool *pool,
                                          const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info)
{
    struct intel_desc_offset offset;
    uint32_t index, i;
    XGL_RESULT ret;

    ret = desc_layout_alloc_ranges(layout, pool, info);
    if (ret != XGL_SUCCESS)
        return ret;

    if (layout->prior_layout) {
        index = layout->prior_layout->end;
        offset = layout->prior_layout->pool_size;
    } else {
        index = 0;
        intel_desc_offset_set(&offset, 0, 0);
    }

    layout->begin = index;

    /* initialize the ranges */
    for (i = 0; i < layout->range_count; i++, info = info->pNext) {
        struct intel_desc_layout_range *range = &layout->ranges[i];
        struct intel_desc_offset size;

        ret = desc_pool_get_desc_size(pool, info->descriptorType, &size);
        if (ret != XGL_SUCCESS)
            return ret;

        /* info->stageFlags does not gain us anything */
        range->type = info->descriptorType;

        range->begin = index;
        range->offset = offset;

        if (info->immutableSampler != XGL_NULL_HANDLE) {
            range->immutable_sampler = intel_sampler(info->immutableSampler);
            /* do not increment sampler offset */
            intel_desc_offset_set(&range->increment, size.surface, 0);
        } else {
            range->immutable_sampler = NULL;
            range->increment = size;
        }

        /* increment index and offset */
        index += info->count;
        intel_desc_offset_mad(&size, &range->increment, &size,
                              info->count - 1);
        intel_desc_offset_add(&offset, &offset, &size);

        range->end = index;
    }

    layout->end = index;
    layout->pool_size = offset;

    return XGL_SUCCESS;
}

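/*
 * Record the per-stage bind points this layout occupies.  With
 * XGL_SHADER_STAGE_FLAGS_ALL, one bind point is shared by every stage;
 * otherwise bind_points supplies one entry per stage bit set in
 * stage_flags, in stage order.
 */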
static XGL_RESULT desc_layout_init_bind_points(struct intel_desc_layout *layout,
                                               XGL_FLAGS stage_flags,
                                               const uint32_t *bind_points)
{
    if (!bind_points)
        return XGL_ERROR_INVALID_POINTER;

    layout->stage_flags = stage_flags;

    if (stage_flags == XGL_SHADER_STAGE_FLAGS_ALL) {
        layout->bind_point_vs = *bind_points;
        layout->bind_point_tcs = *bind_points;
        layout->bind_point_tes = *bind_points;
        layout->bind_point_gs = *bind_points;
        layout->bind_point_fs = *bind_points;
        layout->bind_point_cs = *bind_points;
    } else {
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_VERTEX_BIT)
            layout->bind_point_vs = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_TESS_CONTROL_BIT)
            layout->bind_point_tcs = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_TESS_EVALUATION_BIT)
            layout->bind_point_tes = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_GEOMETRY_BIT)
            layout->bind_point_gs = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_FRAGMENT_BIT)
            layout->bind_point_fs = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_COMPUTE_BIT)
            layout->bind_point_cs = *bind_points++;
    }

    return XGL_SUCCESS;
}

XGL_RESULT intel_desc_layout_create(struct intel_dev *dev,
                                    XGL_FLAGS stage_flags,
                                    const uint32_t *bind_points,
                                    const struct intel_desc_layout *prior_layout,
                                    const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info,
                                    struct intel_desc_layout **layout_ret)
{
    struct intel_desc_layout *layout;
    XGL_RESULT ret;

    layout = (struct intel_desc_layout *)
        intel_base_create(dev, sizeof(*layout), dev->base.dbg,
                          XGL_DBG_OBJECT_DESCRIPTOR_SET_LAYOUT, info, 0);
    if (!layout)
        return XGL_ERROR_OUT_OF_MEMORY;

    layout->prior_layout = prior_layout;

    ret = desc_layout_init_bind_points(layout, stage_flags, bind_points);
    if (ret == XGL_SUCCESS)
        ret = desc_layout_init_ranges(layout, dev->desc_pool, info);
    if (ret != XGL_SUCCESS) {
        intel_desc_layout_destroy(layout);
        return ret;
    }

    layout->obj.destroy = desc_layout_destroy;

    *layout_ret = layout;

    return XGL_SUCCESS;
}

void intel_desc_layout_destroy(struct intel_desc_layout *layout)
{
    icd_free(layout->ranges);
    intel_base_destroy(&layout->obj.base);
}

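/*
 * Position an iterator at the given global descriptor index, which must
 * fall within sublayout.
 */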
static void desc_layout_init_iter(const struct intel_desc_layout *sublayout,
                                  uint32_t index,
                                  struct intel_desc_layout_iter *iter)
{
    const struct intel_desc_layout_range *range;

    assert(index >= sublayout->begin && index < sublayout->end);

    /* find the range the index is in */
    for (range = sublayout->ranges;; range++) {
        assert(range < sublayout->ranges + sublayout->range_count);
        if (index < range->end)
            break;
    }

    /* current position */
    iter->sublayout = sublayout;
    iter->range = range;
    iter->index = index;

    iter->type = iter->range->type;
    intel_desc_offset_mad(&iter->offset_begin, &range->increment,
                          &range->offset, iter->index - range->begin);
    intel_desc_offset_add(&iter->offset_end, &iter->offset_begin,
                          &range->increment);
}

bool intel_desc_layout_find_bind_point(const struct intel_desc_layout *layout,
                                       XGL_PIPELINE_SHADER_STAGE stage,
                                       uint32_t set, uint32_t binding,
                                       struct intel_desc_layout_iter *iter)
{
    /* find the layout at the bind point */
    switch (stage) {
#define CASE(stage, s)                                        \
    case XGL_SHADER_STAGE_ ##stage:                           \
        while (layout) {                                      \
            if ((layout->stage_flags &                        \
                 XGL_SHADER_STAGE_FLAGS_ ##stage## _BIT) &&   \
                layout->bind_point_ ##s == set)               \
                break;                                        \
            layout = layout->prior_layout;                    \
        }                                                     \
        break
    CASE(VERTEX, vs);
    CASE(TESS_CONTROL, tcs);
    CASE(TESS_EVALUATION, tes);
    CASE(GEOMETRY, gs);
    CASE(FRAGMENT, fs);
    CASE(COMPUTE, cs);
#undef CASE
    default:
        assert(!"unknown shader stage");
        layout = NULL;
        break;
    }

    if (!layout || layout->begin + binding >= layout->end) {
        memset(iter, 0, sizeof(*iter));
        return false;
    }

    desc_layout_init_iter(layout, layout->begin + binding, iter);

    return true;
}

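/*
 * Find the sublayout in the chain that contains the global descriptor
 * index and position an iterator there.  Returns false when the index
 * is out of bounds.
 */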
bool intel_desc_layout_find_index(const struct intel_desc_layout *layout,
                                  uint32_t index,
                                  struct intel_desc_layout_iter *iter)
{
    if (index >= layout->begin) {
        /* out of bounds */
        if (index >= layout->end)
            layout = NULL;
    } else {
        while (true) {
            layout = layout->prior_layout;
            if (index >= layout->begin) {
                assert(index < layout->end);
                break;
            }
        }
    }

    if (!layout) {
        memset(iter, 0, sizeof(*iter));
        return false;
    }

    desc_layout_init_iter(layout, index, iter);

    return true;
}

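/*
 * Advance the iterator by one descriptor, crossing range and sublayout
 * boundaries as needed.  Returns false once all descriptors have been
 * traversed.
 */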
bool intel_desc_layout_advance_iter(const struct intel_desc_layout *layout,
                                    struct intel_desc_layout_iter *iter)
{
    /* all descriptors traversed */
    if (!iter->sublayout)
        return false;

    iter->index++;
    if (iter->index >= iter->range->end) {
        /* move to the next range */
        iter->range++;

        if (iter->range >= iter->sublayout->ranges +
                           iter->sublayout->range_count) {
            /* find again as the chain is not doubly-linked */
            const bool ret = intel_desc_layout_find_index(layout,
                                                          iter->index, iter);
            if (!ret) {
                iter->sublayout = NULL;
                iter->range = NULL;
            }

            return ret;
        }

        iter->type = iter->range->type;
    }

    iter->offset_begin = iter->offset_end;
    intel_desc_offset_add(&iter->offset_end, &iter->offset_end,
                          &iter->range->increment);

    return true;
}

XGL_RESULT XGLAPI xglCreateDescriptorSetLayout(
    XGL_DEVICE                                   device,
    XGL_FLAGS                                    stageFlags,
    const uint32_t*                              pSetBindPoints,
    XGL_DESCRIPTOR_SET_LAYOUT                    priorSetLayout,
    const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO* pSetLayoutInfoList,
    XGL_DESCRIPTOR_SET_LAYOUT*                   pSetLayout)
{
    struct intel_dev *dev = intel_dev(device);
    struct intel_desc_layout *prior_layout = intel_desc_layout(priorSetLayout);

    return intel_desc_layout_create(dev, stageFlags, pSetBindPoints,
                                    prior_layout, pSetLayoutInfoList,
                                    (struct intel_desc_layout **) pSetLayout);
}

989
990XGL_RESULT XGLAPI xglBeginDescriptorRegionUpdate(
991 XGL_DEVICE device,
992 XGL_DESCRIPTOR_UPDATE_MODE updateMode)
993{
994 struct intel_dev *dev = intel_dev(device);
995 struct intel_desc_pool *pool = dev->desc_pool;
996
997 return intel_desc_pool_begin_update(pool, updateMode);
998}
999
1000XGL_RESULT XGLAPI xglEndDescriptorRegionUpdate(
1001 XGL_DEVICE device,
1002 XGL_CMD_BUFFER cmd_)
1003{
1004 struct intel_dev *dev = intel_dev(device);
1005 struct intel_desc_pool *pool = dev->desc_pool;
1006 struct intel_cmd *cmd = intel_cmd(cmd_);
1007
1008 return intel_desc_pool_end_update(pool, cmd);
1009}
1010
XGL_RESULT XGLAPI xglCreateDescriptorRegion(
    XGL_DEVICE                                   device,
    XGL_DESCRIPTOR_REGION_USAGE                  regionUsage,
    uint32_t                                     maxSets,
    const XGL_DESCRIPTOR_REGION_CREATE_INFO*     pCreateInfo,
    XGL_DESCRIPTOR_REGION*                       pDescriptorRegion)
{
    struct intel_dev *dev = intel_dev(device);

    return intel_desc_region_create(dev, regionUsage, maxSets, pCreateInfo,
                                    (struct intel_desc_region **) pDescriptorRegion);
}

XGL_RESULT XGLAPI xglClearDescriptorRegion(
    XGL_DESCRIPTOR_REGION                        descriptorRegion)
{
    struct intel_desc_region *region = intel_desc_region(descriptorRegion);

    intel_desc_region_free_all(region);

    return XGL_SUCCESS;
}

XGL_RESULT XGLAPI xglAllocDescriptorSets(
    XGL_DESCRIPTOR_REGION                        descriptorRegion,
    XGL_DESCRIPTOR_SET_USAGE                     setUsage,
    uint32_t                                     count,
    const XGL_DESCRIPTOR_SET_LAYOUT*             pSetLayouts,
    XGL_DESCRIPTOR_SET*                          pDescriptorSets,
    uint32_t*                                    pCount)
{
    struct intel_desc_region *region = intel_desc_region(descriptorRegion);
    struct intel_dev *dev = region->dev;
    XGL_RESULT ret = XGL_SUCCESS;
    uint32_t i;

    for (i = 0; i < count; i++) {
        const struct intel_desc_layout *layout =
            intel_desc_layout((XGL_DESCRIPTOR_SET_LAYOUT) pSetLayouts[i]);

        ret = intel_desc_set_create(dev, region, setUsage, layout,
                                    (struct intel_desc_set **) &pDescriptorSets[i]);
        if (ret != XGL_SUCCESS)
            break;
    }

    if (pCount)
        *pCount = i;

    return ret;
}

void XGLAPI xglClearDescriptorSets(
    XGL_DESCRIPTOR_REGION                        descriptorRegion,
    uint32_t                                     count,
    const XGL_DESCRIPTOR_SET*                    pDescriptorSets)
{
    uint32_t i;

    for (i = 0; i < count; i++) {
        struct intel_desc_set *set =
            intel_desc_set((XGL_DESCRIPTOR_SET) pDescriptorSets[i]);

        intel_desc_pool_clear(set->pool, &set->pool_begin, &set->pool_end);
    }
}

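/*
 * Dispatch each structure in the application-provided update chain.  A
 * minimal chain might look like this (a sketch using only the fields
 * the update handlers above actually read; the sampler handle is
 * assumed to be in scope):
 *
 *   XGL_UPDATE_SAMPLERS update;
 *   update.sType = XGL_STRUCTURE_TYPE_UPDATE_SAMPLERS;
 *   update.pNext = NULL;  // end of the chain
 *   update.index = 0;     // first descriptor in the set's layout
 *   update.count = 1;
 *   update.pSamplers = &sampler;
 *   xglUpdateDescriptors(descriptorSet, &update);
 */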
void XGLAPI xglUpdateDescriptors(
    XGL_DESCRIPTOR_SET                           descriptorSet,
    const void*                                  pUpdateChain)
{
    struct intel_desc_set *set = intel_desc_set(descriptorSet);
    const union {
        struct {
            XGL_STRUCTURE_TYPE sType;
            const void* pNext;
        } common;

        XGL_UPDATE_SAMPLERS samplers;
        XGL_UPDATE_SAMPLER_TEXTURES sampler_textures;
        XGL_UPDATE_IMAGES images;
        XGL_UPDATE_BUFFERS buffers;
        XGL_UPDATE_AS_COPY as_copy;
    } *u = pUpdateChain;

    while (u) {
        switch (u->common.sType) {
        case XGL_STRUCTURE_TYPE_UPDATE_SAMPLERS:
            intel_desc_set_update_samplers(set, &u->samplers);
            break;
        case XGL_STRUCTURE_TYPE_UPDATE_SAMPLER_TEXTURES:
            intel_desc_set_update_sampler_textures(set, &u->sampler_textures);
            break;
        case XGL_STRUCTURE_TYPE_UPDATE_IMAGES:
            intel_desc_set_update_images(set, &u->images);
            break;
        case XGL_STRUCTURE_TYPE_UPDATE_BUFFERS:
            intel_desc_set_update_buffers(set, &u->buffers);
            break;
        case XGL_STRUCTURE_TYPE_UPDATE_AS_COPY:
            intel_desc_set_update_as_copy(set, &u->as_copy);
            break;
        default:
            assert(!"unknown descriptor update");
            break;
        }

        u = u->common.pNext;
    }
}