blob: 22213e7c2dea68406c4163422715d215a7490574 [file] [log] [blame]
Chia-I Wuf8385062015-01-04 16:27:24 +08001/*
2 * XGL
3 *
4 * Copyright (C) 2015 LunarG, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included
14 * in all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 *
24 * Authors:
25 * Chia-I Wu <olv@lunarg.com>
26 */
27
28#include "buf.h"
29#include "cmd.h"
30#include "dev.h"
31#include "gpu.h"
32#include "img.h"
33#include "sampler.h"
34#include "view.h"
35#include "desc.h"
36
/* Kind of resource an intel_desc_surface refers to; selects the union member. */
enum intel_desc_surface_type {
    INTEL_DESC_SURFACE_UNUSED,
    INTEL_DESC_SURFACE_BUF,
    INTEL_DESC_SURFACE_IMG,
};
42
/*
 * CPU-side storage for one surface descriptor: a buffer view, an image
 * view, or nothing.  "type" selects the active union member.
 */
struct intel_desc_surface {
    const struct intel_mem *mem;  /* memory backing the view; NULL when unused */
    bool read_only;

    enum intel_desc_surface_type type;
    union {
        const void *unused;
        const struct intel_buf_view *buf;
        const struct intel_img_view *img;
    } u;
};
54
/* CPU-side storage for one sampler descriptor. */
struct intel_desc_sampler {
    const struct intel_sampler *sampler;
};
58
59static bool desc_pool_init_desc_sizes(struct intel_desc_pool *pool,
60 const struct intel_gpu *gpu)
61{
62 pool->surface_desc_size = sizeof(struct intel_desc_surface);
63 pool->sampler_desc_size = sizeof(struct intel_desc_sampler);
64
65 return true;
66}
67
/**
 * Create the device-wide descriptor pool with fixed-capacity CPU storage
 * for surface and sampler descriptors.  On failure, everything allocated
 * so far is released before returning.
 */
XGL_RESULT intel_desc_pool_create(struct intel_dev *dev,
                                  struct intel_desc_pool **pool_ret)
{
    /* fixed capacities; allocations fail once either side is exhausted */
    const uint32_t surface_count = 16384;
    const uint32_t sampler_count = 16384;
    struct intel_desc_pool *pool;

    pool = icd_alloc(sizeof(*pool), 0, XGL_SYSTEM_ALLOC_INTERNAL);
    if (!pool)
        return XGL_ERROR_OUT_OF_MEMORY;

    memset(pool, 0, sizeof(*pool));

    if (!desc_pool_init_desc_sizes(pool, dev->gpu)) {
        icd_free(pool);
        return XGL_ERROR_UNKNOWN;
    }

    /* total pool size in bytes, kept separately per descriptor kind */
    intel_desc_offset_set(&pool->size,
            pool->surface_desc_size * surface_count,
            pool->sampler_desc_size * sampler_count);

    pool->surfaces = icd_alloc(pool->size.surface,
            64, XGL_SYSTEM_ALLOC_INTERNAL);
    if (!pool->surfaces) {
        icd_free(pool);
        return XGL_ERROR_OUT_OF_MEMORY;
    }

    pool->samplers = icd_alloc(pool->size.sampler,
            64, XGL_SYSTEM_ALLOC_INTERNAL);
    if (!pool->samplers) {
        icd_free(pool->surfaces);
        icd_free(pool);
        return XGL_ERROR_OUT_OF_MEMORY;
    }

    *pool_ret = pool;

    return XGL_SUCCESS;
}
109
110void intel_desc_pool_destroy(struct intel_desc_pool *pool)
111{
112 icd_free(pool->samplers);
113 icd_free(pool->surfaces);
114 icd_free(pool);
115}
116
117/**
118 * Get the size of a descriptor in the pool.
119 */
120static XGL_RESULT desc_pool_get_desc_size(const struct intel_desc_pool *pool,
121 XGL_DESCRIPTOR_TYPE type,
122 struct intel_desc_offset *size)
123{
124 uint32_t surface_size = 0, sampler_size = 0;
125
126 switch (type) {
127 case XGL_DESCRIPTOR_TYPE_SAMPLER:
128 sampler_size = pool->sampler_desc_size;
129 break;
130 case XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE:
131 surface_size = pool->surface_desc_size;
132 sampler_size = pool->sampler_desc_size;
133 break;
134 case XGL_DESCRIPTOR_TYPE_TEXTURE:
135 case XGL_DESCRIPTOR_TYPE_TEXTURE_BUFFER:
136 case XGL_DESCRIPTOR_TYPE_IMAGE:
137 case XGL_DESCRIPTOR_TYPE_IMAGE_BUFFER:
138 case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
139 case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER:
140 case XGL_DESCRIPTOR_TYPE_RAW_BUFFER:
141 case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
142 case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER_DYNAMIC:
143 case XGL_DESCRIPTOR_TYPE_RAW_BUFFER_DYNAMIC:
144 surface_size = pool->surface_desc_size;
145 break;
146 default:
147 assert(!"unknown descriptor type");
148 return XGL_ERROR_INVALID_VALUE;
149 break;
150 }
151
152 intel_desc_offset_set(size, surface_size, sampler_size);
153
154 return XGL_SUCCESS;
155}
156
/**
 * Carve a contiguous range [*begin, *end) out of the pool, sized for the
 * descriptor counts in the region create info.  The pool is bump-allocated;
 * intel_desc_pool_free() does not reclaim (see its comment), so space is
 * only recovered when the pool is destroyed.
 */
XGL_RESULT intel_desc_pool_alloc(struct intel_desc_pool *pool,
                                 const XGL_DESCRIPTOR_REGION_CREATE_INFO *info,
                                 struct intel_desc_offset *begin,
                                 struct intel_desc_offset *end)
{
    uint32_t surface_size = 0, sampler_size = 0;
    struct intel_desc_offset alloc;
    uint32_t i;

    /* calculate sizes needed */
    for (i = 0; i < info->count; i++) {
        const XGL_DESCRIPTOR_TYPE_COUNT *tc = &info->pTypeCount[i];
        struct intel_desc_offset size;
        XGL_RESULT ret;

        ret = desc_pool_get_desc_size(pool, tc->type, &size);
        if (ret != XGL_SUCCESS)
            return ret;

        surface_size += size.surface * tc->count;
        sampler_size += size.sampler * tc->count;
    }

    intel_desc_offset_set(&alloc, surface_size, sampler_size);

    *begin = pool->cur;
    intel_desc_offset_add(end, &pool->cur, &alloc);

    /* fail without moving the write pointer when the pool is exhausted */
    if (!intel_desc_offset_within(end, &pool->size))
        return XGL_ERROR_OUT_OF_MEMORY;

    /* increment the writer pointer */
    pool->cur = *end;

    return XGL_SUCCESS;
}
193
194static void desc_pool_validate_begin_end(const struct intel_desc_pool *pool,
195 const struct intel_desc_offset *begin,
196 const struct intel_desc_offset *end)
197{
198 assert(begin->surface % pool->surface_desc_size == 0 &&
199 begin->sampler % pool->sampler_desc_size == 0);
200 assert(end->surface % pool->surface_desc_size == 0 &&
201 end->sampler % pool->sampler_desc_size == 0);
202 assert(intel_desc_offset_within(end, &pool->size));
203}
204
/*
 * "Free" a range previously returned by intel_desc_pool_alloc().  This only
 * validates the range; the space is intentionally not reclaimed (the pool
 * is a bump allocator).
 */
void intel_desc_pool_free(struct intel_desc_pool *pool,
                          const struct intel_desc_offset *begin,
                          const struct intel_desc_offset *end)
{
    desc_pool_validate_begin_end(pool, begin, end);

    /* is it ok not to reclaim? */
}
213
/* Begin a descriptor update; nothing to do for CPU-side descriptors. */
XGL_RESULT intel_desc_pool_begin_update(struct intel_desc_pool *pool,
                                        XGL_DESCRIPTOR_UPDATE_MODE mode)
{
    /* no-op */
    return XGL_SUCCESS;
}
220
/* End a descriptor update; descriptors are consumed at draw time instead. */
XGL_RESULT intel_desc_pool_end_update(struct intel_desc_pool *pool,
                                      struct intel_cmd *cmd)
{
    /* No pipelined update. cmd_draw() will do the work. */
    return XGL_SUCCESS;
}
227
228void intel_desc_pool_clear(struct intel_desc_pool *pool,
229 const struct intel_desc_offset *begin,
230 const struct intel_desc_offset *end)
231{
232 uint32_t i;
233
234 desc_pool_validate_begin_end(pool, begin, end);
235
236 for (i = begin->surface; i < end->surface; i += pool->surface_desc_size) {
237 struct intel_desc_surface *desc = (struct intel_desc_surface *)
238 ((char *) pool->surfaces + i);
239
240 desc->mem = NULL;
241 desc->type = INTEL_DESC_SURFACE_UNUSED;
242 desc->u.unused = NULL;
243 }
244
245 for (i = begin->sampler; i < end->sampler; i += pool->sampler_desc_size) {
246 struct intel_desc_sampler *desc = (struct intel_desc_sampler *)
247 ((char *) pool->samplers + i);
248
249 desc->sampler = NULL;
250 }
251}
252
/*
 * Copy descriptor data into the pool range [begin, end).  "surfaces" and
 * "samplers" must each hold at least the corresponding byte span of the
 * range; either may be NULL when that span is empty.
 */
void intel_desc_pool_update(struct intel_desc_pool *pool,
                            const struct intel_desc_offset *begin,
                            const struct intel_desc_offset *end,
                            const struct intel_desc_surface *surfaces,
                            const struct intel_desc_sampler *samplers)
{
    desc_pool_validate_begin_end(pool, begin, end);

    if (begin->surface < end->surface) {
        memcpy((char *) pool->surfaces + begin->surface, surfaces,
                end->surface - begin->surface);
    }

    if (begin->sampler < end->sampler) {
        memcpy((char *) pool->samplers + begin->sampler, samplers,
                end->sampler - begin->sampler);
    }
}
271
/*
 * Copy descriptors within the pool: the range starting at "src" is copied
 * to [begin, end).  The source and destination ranges must not overlap and
 * the source must lie entirely inside the pool.
 */
void intel_desc_pool_copy(struct intel_desc_pool *pool,
                          const struct intel_desc_offset *begin,
                          const struct intel_desc_offset *end,
                          const struct intel_desc_offset *src)
{
    struct intel_desc_offset src_end;
    const struct intel_desc_surface *surfaces;
    const struct intel_desc_sampler *samplers;

    /* no overlap */
    assert(intel_desc_offset_within(src, begin) ||
           intel_desc_offset_within(end, src));

    /* no read past pool */
    intel_desc_offset_sub(&src_end, end, begin);
    intel_desc_offset_add(&src_end, src, &src_end);
    assert(intel_desc_offset_within(&src_end, &pool->size));

    surfaces = (const struct intel_desc_surface *)
        ((const char *) pool->surfaces + src->surface);
    samplers = (const struct intel_desc_sampler *)
        ((const char *) pool->samplers + src->sampler);

    intel_desc_pool_update(pool, begin, end, surfaces, samplers);
}
297
/* intel_obj destroy callback for descriptor regions. */
static void desc_region_destroy(struct intel_obj *obj)
{
    intel_desc_region_destroy(intel_desc_region_from_obj(obj));
}
304
/**
 * Create a descriptor region backed by a range carved out of the device's
 * descriptor pool.  "usage" and "max_sets" are currently not consulted --
 * sizing comes from "info" alone.
 */
XGL_RESULT intel_desc_region_create(struct intel_dev *dev,
                                    XGL_DESCRIPTOR_REGION_USAGE usage,
                                    uint32_t max_sets,
                                    const XGL_DESCRIPTOR_REGION_CREATE_INFO *info,
                                    struct intel_desc_region **region_ret)
{
    struct intel_desc_region *region;
    XGL_RESULT ret;

    region = (struct intel_desc_region *)
        intel_base_create(dev, sizeof(*region), dev->base.dbg,
                XGL_DBG_OBJECT_DESCRIPTOR_REGION, info, 0);
    if (!region)
        return XGL_ERROR_OUT_OF_MEMORY;

    region->dev = dev;

    ret = intel_desc_pool_alloc(dev->desc_pool, info,
            &region->pool_begin, &region->pool_end);
    if (ret != XGL_SUCCESS) {
        intel_base_destroy(&region->obj.base);
        return ret;
    }

    /* point to head */
    region->cur = region->pool_begin;

    region->obj.destroy = desc_region_destroy;

    *region_ret = region;

    return XGL_SUCCESS;
}
338
/* Return the region's pool range (a no-op reclaim) and destroy the object. */
void intel_desc_region_destroy(struct intel_desc_region *region)
{
    intel_desc_pool_free(region->dev->desc_pool,
            &region->pool_begin, &region->pool_end);
    intel_base_destroy(&region->obj.base);
}
345
346XGL_RESULT intel_desc_region_alloc(struct intel_desc_region *region,
347 const struct intel_desc_layout *layout,
348 struct intel_desc_offset *begin,
349 struct intel_desc_offset *end)
350{
351 *begin = region->cur;
352 intel_desc_offset_add(end, &region->cur, &layout->pool_size);
353
354 if (!intel_desc_offset_within(end, &region->pool_end))
355 return XGL_ERROR_OUT_OF_MEMORY;
356
357 /* increment the writer pointer */
358 region->cur = *end;
359
360 return XGL_SUCCESS;
361}
362
/* Discard all sets allocated from the region by rewinding its write pointer. */
void intel_desc_region_free_all(struct intel_desc_region *region)
{
    /* reset to head */
    region->cur = region->pool_begin;
}
368
/* intel_obj destroy callback for descriptor sets. */
static void desc_set_destroy(struct intel_obj *obj)
{
    intel_desc_set_destroy(intel_desc_set_from_obj(obj));
}
375
/**
 * Create a descriptor set whose storage is allocated from "region"
 * according to "layout".  "usage" is currently not consulted.
 */
XGL_RESULT intel_desc_set_create(struct intel_dev *dev,
                                 struct intel_desc_region *region,
                                 XGL_DESCRIPTOR_SET_USAGE usage,
                                 const struct intel_desc_layout *layout,
                                 struct intel_desc_set **set_ret)
{
    struct intel_desc_set *set;
    XGL_RESULT ret;

    set = (struct intel_desc_set *)
        intel_base_create(dev, sizeof(*set), dev->base.dbg,
                XGL_DBG_OBJECT_DESCRIPTOR_SET, NULL, 0);
    if (!set)
        return XGL_ERROR_OUT_OF_MEMORY;

    /* all sets share the device-wide pool */
    set->pool = dev->desc_pool;
    ret = intel_desc_region_alloc(region, layout,
            &set->pool_begin, &set->pool_end);
    if (ret != XGL_SUCCESS) {
        intel_base_destroy(&set->obj.base);
        return ret;
    }

    set->layout = layout;

    set->obj.destroy = desc_set_destroy;

    *set_ret = set;

    return XGL_SUCCESS;
}
407
/* Destroy a set; its pool range stays with the region until a region reset. */
void intel_desc_set_destroy(struct intel_desc_set *set)
{
    intel_base_destroy(&set->obj.base);
}
412
/*
 * Write the descriptor at the iterator's current position, translating the
 * iterator's layout-relative offsets into pool offsets.
 */
static void desc_set_update(struct intel_desc_set *set,
                            const struct intel_desc_layout_iter *iter,
                            const struct intel_desc_surface *surfaces,
                            const struct intel_desc_sampler *samplers)
{
    struct intel_desc_offset begin, end;

    intel_desc_offset_add(&begin, &set->pool_begin, &iter->offset_begin);
    intel_desc_offset_add(&end, &set->pool_begin, &iter->offset_end);

    intel_desc_pool_update(set->pool, &begin, &end, surfaces, samplers);
}
425
426static bool desc_set_img_layout_read_only(XGL_IMAGE_LAYOUT layout)
427{
428 switch (layout) {
429 case XGL_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
430 case XGL_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
431 case XGL_IMAGE_LAYOUT_TRANSFER_SOURCE_OPTIMAL:
432 return true;
433 default:
434 return false;
435 }
436}
437
/*
 * Write "update->count" sampler descriptors starting at "update->index".
 * Stops silently when the index is invalid, when a slot is not of sampler
 * type, or when the layout runs out of slots.
 */
void intel_desc_set_update_samplers(struct intel_desc_set *set,
                                    const XGL_UPDATE_SAMPLERS *update)
{
    struct intel_desc_layout_iter iter;
    uint32_t i;

    if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
        return;

    for (i = 0; i < update->count; i++) {
        const struct intel_sampler *sampler =
            intel_sampler((XGL_SAMPLER) update->pSamplers[i]);
        struct intel_desc_sampler desc;

        /* type mismatch: stop rather than corrupt the slot */
        if (iter.type != XGL_DESCRIPTOR_TYPE_SAMPLER)
            break;

        desc.sampler = sampler;
        desc_set_update(set, &iter, NULL, &desc);

        if (!intel_desc_layout_advance_iter(set->layout, &iter))
            break;
    }
}
462
/*
 * Write "update->count" combined sampler/texture descriptors starting at
 * "update->index".  Ranges with an immutable sampler get the immutable
 * sampler written once per range; the application-supplied sampler is then
 * written per-slot alongside the image view.
 */
void intel_desc_set_update_sampler_textures(struct intel_desc_set *set,
                                            const XGL_UPDATE_SAMPLER_TEXTURES *update)
{
    struct intel_desc_layout_iter iter;
    const struct intel_sampler *immutable_sampler = NULL;
    uint32_t i;

    if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
        return;

    for (i = 0; i < update->count; i++) {
        const struct intel_sampler *sampler =
            intel_sampler(update->pSamplerImageViews[i].pSampler);
        const XGL_IMAGE_VIEW_ATTACH_INFO *info =
            update->pSamplerImageViews[i].pImageView;
        const struct intel_img_view *view = intel_img_view(info->view);
        struct intel_desc_surface view_desc;
        struct intel_desc_sampler sampler_desc;

        /* NOTE(review): this path returns while the siblings break -- confirm intent */
        if (iter.type != XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE)
            return;

        /* update every immutable sampler once */
        if (immutable_sampler != iter.range->immutable_sampler) {
            immutable_sampler = iter.range->immutable_sampler;

            if (immutable_sampler) {
                struct intel_desc_offset begin, end;

                /* sampler-only range: end.surface == begin.surface */
                intel_desc_offset_add(&begin, &set->pool_begin,
                        &iter.offset_begin);
                intel_desc_offset_set(&end, begin.surface,
                        begin.sampler + set->pool->sampler_desc_size);

                sampler_desc.sampler = immutable_sampler;
                intel_desc_pool_update(set->pool, &begin, &end,
                        NULL, &sampler_desc);
            }
        }

        view_desc.mem = view->img->obj.mem;
        view_desc.read_only = desc_set_img_layout_read_only(info->layout);
        view_desc.type = INTEL_DESC_SURFACE_IMG;
        view_desc.u.img = view;

        sampler_desc.sampler = sampler;

        desc_set_update(set, &iter, &view_desc, &sampler_desc);

        if (!intel_desc_layout_advance_iter(set->layout, &iter))
            break;
    }
}
516
/*
 * Write "update->count" image-view descriptors starting at
 * "update->index".  Stops when a slot's type differs from the update's
 * descriptorType or the layout runs out of slots.
 */
void intel_desc_set_update_images(struct intel_desc_set *set,
                                  const XGL_UPDATE_IMAGES *update)
{
    struct intel_desc_layout_iter iter;
    uint32_t i;

    if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
        return;

    for (i = 0; i < update->count; i++) {
        const XGL_IMAGE_VIEW_ATTACH_INFO *info = update->pImageViews[i];
        const struct intel_img_view *view = intel_img_view(info->view);
        struct intel_desc_surface desc;

        if (iter.type != update->descriptorType)
            break;

        desc.mem = view->img->obj.mem;
        desc.read_only = desc_set_img_layout_read_only(info->layout);
        desc.type = INTEL_DESC_SURFACE_IMG;
        desc.u.img = view;
        desc_set_update(set, &iter, &desc, NULL);

        if (!intel_desc_layout_advance_iter(set->layout, &iter))
            break;
    }
}
544
/*
 * Write "update->count" buffer-view descriptors starting at
 * "update->index".  Buffer descriptors are always writable
 * (read_only = false).
 */
void intel_desc_set_update_buffers(struct intel_desc_set *set,
                                   const XGL_UPDATE_BUFFERS *update)
{
    struct intel_desc_layout_iter iter;
    uint32_t i;

    if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
        return;

    for (i = 0; i < update->count; i++) {
        const XGL_BUFFER_VIEW_ATTACH_INFO *info = update->pBufferViews[i];
        const struct intel_buf_view *view = intel_buf_view(info->view);
        struct intel_desc_surface desc;

        if (iter.type != update->descriptorType)
            break;

        desc.mem = view->buf->obj.mem;
        desc.read_only = false;
        desc.type = INTEL_DESC_SURFACE_BUF;
        desc.u.buf = view;
        desc_set_update(set, &iter, &desc, NULL);

        if (!intel_desc_layout_advance_iter(set->layout, &iter))
            break;
    }
}
572
573void intel_desc_set_update_as_copy(struct intel_desc_set *set,
574 const XGL_UPDATE_AS_COPY *update)
575{
576 const struct intel_desc_set *src_set =
577 intel_desc_set(update->descriptorSet);
578 struct intel_desc_layout_iter iter, src_iter;
579 struct intel_desc_offset begin, end, src_begin;
580 uint32_t i;
581
582 /* disallow combined sampler textures */
583 if (update->descriptorType == XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE)
584 return;
585
586 /* no update->index? */
587 if (!intel_desc_layout_find_index(set->layout, 0, &iter))
588 return;
589 if (!intel_desc_layout_find_index(src_set->layout,
590 update->descriptorIndex, &src_iter))
591 return;
592
593 intel_desc_offset_add(&begin, &set->pool_begin, &iter.offset_begin);
594 intel_desc_offset_add(&src_begin, &src_set->pool_begin,
595 &src_iter.offset_end);
596
597 /* advance to end */
598 for (i = 0; i < update->count; i++) {
599 if (iter.type != update->descriptorType ||
600 src_iter.type != update->descriptorType ||
601 !intel_desc_layout_advance_iter(set->layout, &iter) ||
602 !intel_desc_layout_advance_iter(src_set->layout, &src_iter))
603 break;
604 }
605 if (i < update->count)
606 return;
607
608 intel_desc_offset_add(&end, &src_set->pool_begin, &iter.offset_end);
609
610 intel_desc_pool_copy(src_set->pool, &begin, &end, &src_begin);
611}
612
/*
 * Locate the surface and/or sampler descriptor at "offset" within the set.
 * Either out-pointer may be NULL to skip that part.  The combined range is
 * validated against the pool in debug builds.
 */
static void desc_set_read(const struct intel_desc_set *set,
                          const struct intel_desc_offset *offset,
                          const struct intel_desc_surface **surface,
                          const struct intel_desc_sampler **sampler)
{
    struct intel_desc_offset begin, end;

    intel_desc_offset_add(&begin, &set->pool_begin, offset);
    intel_desc_offset_set(&end, 0, 0);

    if (surface) {
        *surface = (const struct intel_desc_surface *)
            ((const char *) set->pool->surfaces + begin.surface);

        end.surface = set->pool->surface_desc_size;
    }

    if (sampler) {
        *sampler = (const struct intel_desc_sampler *)
            ((const char *) set->pool->samplers + begin.sampler);

        end.sampler = set->pool->sampler_desc_size;
    }

    /* end currently holds sizes; turn it into an absolute offset */
    intel_desc_offset_add(&end, &begin, &end);
    desc_pool_validate_begin_end(set->pool, &begin, &end);
}
640
/*
 * Read back a surface descriptor: its backing memory, read-only flag, and
 * the pre-built SURFACE_STATE commands for the given shader stage.
 * Unused slots yield a NULL command pointer and zero length.
 */
void intel_desc_set_read_surface(const struct intel_desc_set *set,
                                 const struct intel_desc_offset *offset,
                                 XGL_PIPELINE_SHADER_STAGE stage,
                                 const struct intel_mem **mem,
                                 bool *read_only,
                                 const uint32_t **cmd,
                                 uint32_t *cmd_len)
{
    const struct intel_desc_surface *desc;

    desc_set_read(set, offset, &desc, NULL);

    *mem = desc->mem;
    *read_only = desc->read_only;
    switch (desc->type) {
    case INTEL_DESC_SURFACE_BUF:
        /* buffer views keep a separate command variant for FS */
        *cmd = (stage == XGL_SHADER_STAGE_FRAGMENT) ?
            desc->u.buf->fs_cmd : desc->u.buf->cmd;
        *cmd_len = desc->u.buf->cmd_len;
        break;
    case INTEL_DESC_SURFACE_IMG:
        *cmd = desc->u.img->cmd;
        *cmd_len = desc->u.img->cmd_len;
        break;
    case INTEL_DESC_SURFACE_UNUSED:
    default:
        *cmd = NULL;
        *cmd_len = 0;
        break;
    }
}
672
/* Read back the sampler stored at "offset" in the set. */
void intel_desc_set_read_sampler(const struct intel_desc_set *set,
                                 const struct intel_desc_offset *offset,
                                 const struct intel_sampler **sampler)
{
    const struct intel_desc_sampler *desc;

    desc_set_read(set, offset, NULL, &desc);

    *sampler = desc->sampler;
}
683
/* intel_obj destroy callback for descriptor set layouts. */
static void desc_layout_destroy(struct intel_obj *obj)
{
    intel_desc_layout_destroy(intel_desc_layout_from_obj(obj));
}
690
/*
 * Walk the chained create infos to count ranges and dynamic descriptors,
 * then allocate a zeroed ranges array (freed in intel_desc_layout_destroy).
 */
static XGL_RESULT desc_layout_alloc_ranges(struct intel_desc_layout *layout,
                                           const struct intel_desc_pool *pool,
                                           const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info)
{
    /* calculate counts */
    while (info) {
        if (info->sType !=
                XGL_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO)
            return XGL_ERROR_INVALID_POINTER;

        switch (info->descriptorType) {
        case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
        case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER_DYNAMIC:
        case XGL_DESCRIPTOR_TYPE_RAW_BUFFER_DYNAMIC:
            layout->dynamic_desc_count += info->count;
            break;
        default:
            break;
        }

        layout->range_count++;
        info = info->pNext;
    }

    layout->ranges = icd_alloc(sizeof(layout->ranges[0]) *
            layout->range_count, 0, XGL_SYSTEM_ALLOC_INTERNAL);
    if (!layout->ranges)
        return XGL_ERROR_OUT_OF_MEMORY;

    memset(layout->ranges, 0, sizeof(layout->ranges[0]) *
            layout->range_count);

    return XGL_SUCCESS;
}
725
/*
 * Fill in the layout's ranges from the chained create infos, assigning
 * each range a descriptor-index span [begin, end) and a pool-offset span.
 * Indices and offsets continue from the prior layout when one is chained.
 */
static XGL_RESULT desc_layout_init_ranges(struct intel_desc_layout *layout,
                                          const struct intel_desc_pool *pool,
                                          const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info)
{
    struct intel_desc_offset offset;
    uint32_t index, i;
    XGL_RESULT ret;

    ret = desc_layout_alloc_ranges(layout, pool, info);
    if (ret != XGL_SUCCESS)
        return ret;

    /* continue numbering from the prior layout, if any */
    if (layout->prior_layout) {
        index = layout->prior_layout->end;
        offset = layout->prior_layout->pool_size;
    } else {
        index = 0;
        intel_desc_offset_set(&offset, 0, 0);
    }

    layout->begin = index;

    /* initialize the ranges */
    for (i = 0; i < layout->range_count; i++, info = info->pNext) {
        struct intel_desc_layout_range *range = &layout->ranges[i];
        struct intel_desc_offset size;

        ret = desc_pool_get_desc_size(pool, info->descriptorType, &size);
        if (ret != XGL_SUCCESS)
            return ret;

        /* info->stageFlags does not gain us anything */
        range->type = info->descriptorType;

        range->begin = index;
        range->offset = offset;

        if (info->immutableSampler != XGL_NULL_HANDLE) {
            range->immutable_sampler = intel_sampler(info->immutableSampler);
            /* do not increment sampler offset */
            intel_desc_offset_set(&range->increment, size.surface, 0);
        } else {
            range->immutable_sampler = NULL;
            range->increment = size;
        }

        /* increment index and offset */
        index += info->count;
        /* total span for "count" descriptors: one "size" plus (count - 1)
         * increments.  NOTE(review): assumes info->count >= 1 -- a zero
         * count would underflow here; confirm against callers.
         */
        intel_desc_offset_mad(&size, &range->increment, &size,
                info->count - 1);
        intel_desc_offset_add(&offset, &offset, &size);

        range->end = index;
    }

    layout->end = index;
    layout->pool_size = offset;

    return XGL_SUCCESS;
}
786
/*
 * Record the per-stage bind points.  With XGL_SHADER_STAGE_FLAGS_ALL a
 * single value applies to every stage; otherwise "bind_points" is an array
 * consumed in stage order, one entry per set flag.
 */
static XGL_RESULT desc_layout_init_bind_points(struct intel_desc_layout *layout,
                                               XGL_FLAGS stage_flags,
                                               const uint32_t *bind_points)
{
    if (!bind_points)
        return XGL_ERROR_INVALID_POINTER;

    layout->stage_flags = stage_flags;

    if (stage_flags == XGL_SHADER_STAGE_FLAGS_ALL) {
        layout->bind_point_vs = *bind_points;
        layout->bind_point_tcs = *bind_points;
        layout->bind_point_tes = *bind_points;
        layout->bind_point_gs = *bind_points;
        layout->bind_point_fs = *bind_points;
        layout->bind_point_cs = *bind_points;
    } else {
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_VERTEX_BIT)
            layout->bind_point_vs = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_TESS_CONTROL_BIT)
            layout->bind_point_tcs = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_TESS_EVALUATION_BIT)
            layout->bind_point_tes = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_GEOMETRY_BIT)
            layout->bind_point_gs = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_FRAGMENT_BIT)
            layout->bind_point_fs = *bind_points++;
        if (stage_flags & XGL_SHADER_STAGE_FLAGS_COMPUTE_BIT)
            layout->bind_point_cs = *bind_points++;
    }

    return XGL_SUCCESS;
}
820
/**
 * Create a descriptor set layout, optionally chained onto "prior_layout"
 * (descriptor indices and pool offsets continue from it).
 */
XGL_RESULT intel_desc_layout_create(struct intel_dev *dev,
                                    XGL_FLAGS stage_flags,
                                    const uint32_t *bind_points,
                                    const struct intel_desc_layout *prior_layout,
                                    const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info,
                                    struct intel_desc_layout **layout_ret)
{
    struct intel_desc_layout *layout;
    XGL_RESULT ret;

    layout = (struct intel_desc_layout *)
        intel_base_create(dev, sizeof(*layout), dev->base.dbg,
                XGL_DBG_OBJECT_DESCRIPTOR_SET_LAYOUT, info, 0);
    if (!layout)
        return XGL_ERROR_OUT_OF_MEMORY;

    layout->prior_layout = prior_layout;

    ret = desc_layout_init_bind_points(layout, stage_flags, bind_points);
    if (ret == XGL_SUCCESS)
        ret = desc_layout_init_ranges(layout, dev->desc_pool, info);
    if (ret != XGL_SUCCESS) {
        /* safe on partial init: ranges is either NULL or allocated */
        intel_desc_layout_destroy(layout);
        return ret;
    }

    layout->obj.destroy = desc_layout_destroy;

    *layout_ret = layout;

    return XGL_SUCCESS;
}
853
/* Free the layout's ranges array and destroy the base object. */
void intel_desc_layout_destroy(struct intel_desc_layout *layout)
{
    icd_free(layout->ranges);
    intel_base_destroy(&layout->obj.base);
}
859
/*
 * Position an iterator on descriptor "index" of "sublayout".  The index
 * must lie within [sublayout->begin, sublayout->end).
 */
static void desc_layout_init_iter(const struct intel_desc_layout *sublayout,
                                  uint32_t index,
                                  struct intel_desc_layout_iter *iter)
{
    const struct intel_desc_layout_range *range;

    assert(index >= sublayout->begin && index < sublayout->end);

    /* find the range the index is in */
    for (range = sublayout->ranges;; range++) {
        assert(range < sublayout->ranges + sublayout->range_count);
        if (index < range->end)
            break;
    }

    /* current position */
    iter->sublayout = sublayout;
    iter->range = range;
    iter->index = index;

    iter->type = iter->range->type;
    /* offset_begin = range->offset + increment * (index - range->begin) */
    intel_desc_offset_mad(&iter->offset_begin, &range->increment,
            &range->offset, iter->index - range->begin);
    intel_desc_offset_add(&iter->offset_end, &iter->offset_begin,
            &range->increment);
}
886
/*
 * Walk the layout chain for the sublayout bound to (stage, set), then
 * position "iter" on descriptor "binding" within it.  Returns false (with
 * a zeroed iterator) when no matching sublayout exists or the binding is
 * out of range.
 */
bool intel_desc_layout_find_bind_point(const struct intel_desc_layout *layout,
                                       XGL_PIPELINE_SHADER_STAGE stage,
                                       uint32_t set, uint32_t binding,
                                       struct intel_desc_layout_iter *iter)
{
    /* find the layout at the bind point */
    switch (stage) {
#define CASE(stage, s) \
    case XGL_SHADER_STAGE_ ##stage: \
        while (layout) { \
            if ((layout->stage_flags & \
                 XGL_SHADER_STAGE_FLAGS_ ##stage## _BIT) && \
                layout->bind_point_ ##s == set) \
                break; \
            layout = layout->prior_layout; \
        } \
        break
    CASE(VERTEX, vs);
    CASE(TESS_CONTROL, tcs);
    CASE(TESS_EVALUATION, tes);
    CASE(GEOMETRY, gs);
    CASE(FRAGMENT, fs);
    CASE(COMPUTE, cs);
#undef CASE
    default:
        assert(!"unknown shader stage");
        layout = NULL;
        break;
    }

    if (!layout || layout->begin + binding >= layout->end) {
        memset(iter, 0, sizeof(*iter));
        return false;
    }

    desc_layout_init_iter(layout, layout->begin + binding, iter);

    return true;
}
926
927bool intel_desc_layout_find_index(const struct intel_desc_layout *layout,
928 uint32_t index,
929 struct intel_desc_layout_iter *iter)
930{
931 if (index >= layout->begin) {
932 /* out of bound */
933 if (index >= layout->end)
934 layout = NULL;
935 } else {
936 while (true) {
937 layout = layout->prior_layout;
938 if (index >= layout->begin) {
939 assert(index < layout->end);
940 break;
941 }
942 }
943 }
944
945 if (!layout) {
946 memset(iter, 0, sizeof(*iter));
947 return false;
948 }
949
950 desc_layout_init_iter(layout, index, iter);
951
952 return true;
953}
954
/*
 * Advance the iterator to the next descriptor, crossing range and
 * sublayout boundaries as needed.  Returns false when the iterator has
 * run past the last descriptor (the iterator is then invalidated).
 */
bool intel_desc_layout_advance_iter(const struct intel_desc_layout *layout,
                                    struct intel_desc_layout_iter *iter)
{
    /* all descriptors traversed */
    if (!iter->sublayout)
        return false;

    iter->index++;
    if (iter->index >= iter->range->end) {
        /* move to the next range */
        iter->range++;

        if (iter->range >= iter->sublayout->ranges +
                iter->sublayout->range_count) {
            /* find again as the chain is not doubly-linked */
            const bool ret = intel_desc_layout_find_index(layout,
                    iter->index, iter);
            if (!ret) {
                iter->sublayout = NULL;
                iter->range = NULL;
            }

            return ret;
        }

        iter->type = iter->range->type;
    }

    /* slide the offset window forward by one increment */
    iter->offset_begin = iter->offset_end;
    intel_desc_offset_add(&iter->offset_end, &iter->offset_end,
            &iter->range->increment);

    return true;
}
989
/* XGL entry point: thin wrapper over intel_desc_layout_create(). */
XGL_RESULT XGLAPI xglCreateDescriptorSetLayout(
    XGL_DEVICE device,
    XGL_FLAGS stageFlags,
    const uint32_t* pSetBindPoints,
    XGL_DESCRIPTOR_SET_LAYOUT priorSetLayout,
    const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO* pSetLayoutInfoList,
    XGL_DESCRIPTOR_SET_LAYOUT* pSetLayout)
{
    struct intel_dev *dev = intel_dev(device);
    struct intel_desc_layout *prior_layout = intel_desc_layout(priorSetLayout);

    return intel_desc_layout_create(dev, stageFlags, pSetBindPoints,
            prior_layout, pSetLayoutInfoList,
            (struct intel_desc_layout **) pSetLayout);
}
1005
1006XGL_RESULT XGLAPI xglBeginDescriptorRegionUpdate(
1007 XGL_DEVICE device,
1008 XGL_DESCRIPTOR_UPDATE_MODE updateMode)
1009{
1010 struct intel_dev *dev = intel_dev(device);
1011 struct intel_desc_pool *pool = dev->desc_pool;
1012
1013 return intel_desc_pool_begin_update(pool, updateMode);
1014}
1015
1016XGL_RESULT XGLAPI xglEndDescriptorRegionUpdate(
1017 XGL_DEVICE device,
1018 XGL_CMD_BUFFER cmd_)
1019{
1020 struct intel_dev *dev = intel_dev(device);
1021 struct intel_desc_pool *pool = dev->desc_pool;
1022 struct intel_cmd *cmd = intel_cmd(cmd_);
1023
1024 return intel_desc_pool_end_update(pool, cmd);
1025}
1026
/* XGL entry point: thin wrapper over intel_desc_region_create(). */
XGL_RESULT XGLAPI xglCreateDescriptorRegion(
    XGL_DEVICE device,
    XGL_DESCRIPTOR_REGION_USAGE regionUsage,
    uint32_t maxSets,
    const XGL_DESCRIPTOR_REGION_CREATE_INFO* pCreateInfo,
    XGL_DESCRIPTOR_REGION* pDescriptorRegion)
{
    struct intel_dev *dev = intel_dev(device);

    return intel_desc_region_create(dev, regionUsage, maxSets, pCreateInfo,
            (struct intel_desc_region **) pDescriptorRegion);
}
1039
1040XGL_RESULT XGLAPI xglClearDescriptorRegion(
1041 XGL_DESCRIPTOR_REGION descriptorRegion)
1042{
1043 struct intel_desc_region *region = intel_desc_region(descriptorRegion);
1044
1045 intel_desc_region_free_all(region);
1046
1047 return XGL_SUCCESS;
1048}
1049
/*
 * XGL entry point: allocate "count" sets from the region, one per layout.
 * On failure partway through, *pCount reports how many sets were actually
 * created; earlier sets are left alive for the caller.
 */
XGL_RESULT XGLAPI xglAllocDescriptorSets(
    XGL_DESCRIPTOR_REGION descriptorRegion,
    XGL_DESCRIPTOR_SET_USAGE setUsage,
    uint32_t count,
    const XGL_DESCRIPTOR_SET_LAYOUT* pSetLayouts,
    XGL_DESCRIPTOR_SET* pDescriptorSets,
    uint32_t* pCount)
{
    struct intel_desc_region *region = intel_desc_region(descriptorRegion);
    struct intel_dev *dev = region->dev;
    XGL_RESULT ret = XGL_SUCCESS;
    uint32_t i;

    for (i = 0; i < count; i++) {
        const struct intel_desc_layout *layout =
            intel_desc_layout((XGL_DESCRIPTOR_SET_LAYOUT) pSetLayouts[i]);

        ret = intel_desc_set_create(dev, region, setUsage, layout,
                (struct intel_desc_set **) &pDescriptorSets[i]);
        if (ret != XGL_SUCCESS)
            break;
    }

    if (pCount)
        *pCount = i;

    return ret;
}
1078
/*
 * XGL entry point: reset each set's descriptors to the unused state.  The
 * sets' pool ranges remain allocated.
 */
void XGLAPI xglClearDescriptorSets(
    XGL_DESCRIPTOR_REGION descriptorRegion,
    uint32_t count,
    const XGL_DESCRIPTOR_SET* pDescriptorSets)
{
    uint32_t i;

    for (i = 0; i < count; i++) {
        struct intel_desc_set *set =
            intel_desc_set((XGL_DESCRIPTOR_SET) pDescriptorSets[i]);

        intel_desc_pool_clear(set->pool, &set->pool_begin, &set->pool_end);
    }
}
1093
/*
 * XGL entry point: apply a chain of descriptor updates to a set.  Each
 * node in the chain is dispatched on its sType; unknown nodes are skipped
 * (with an assert in debug builds).
 */
void XGLAPI xglUpdateDescriptors(
    XGL_DESCRIPTOR_SET descriptorSet,
    const void* pUpdateChain)
{
    struct intel_desc_set *set = intel_desc_set(descriptorSet);
    /* view the chain nodes through their common sType/pNext header */
    const union {
        struct {
            XGL_STRUCTURE_TYPE sType;
            const void* pNext;
        } common;

        XGL_UPDATE_SAMPLERS samplers;
        XGL_UPDATE_SAMPLER_TEXTURES sampler_textures;
        XGL_UPDATE_IMAGES images;
        XGL_UPDATE_BUFFERS buffers;
        XGL_UPDATE_AS_COPY as_copy;
    } *u = pUpdateChain;

    while (u) {
        switch (u->common.sType) {
        case XGL_STRUCTURE_TYPE_UPDATE_SAMPLERS:
            intel_desc_set_update_samplers(set, &u->samplers);
            break;
        case XGL_STRUCTURE_TYPE_UPDATE_SAMPLER_TEXTURES:
            intel_desc_set_update_sampler_textures(set, &u->sampler_textures);
            break;
        case XGL_STRUCTURE_TYPE_UPDATE_IMAGES:
            intel_desc_set_update_images(set, &u->images);
            break;
        case XGL_STRUCTURE_TYPE_UPDATE_BUFFERS:
            intel_desc_set_update_buffers(set, &u->buffers);
            break;
        case XGL_STRUCTURE_TYPE_UPDATE_AS_COPY:
            intel_desc_set_update_as_copy(set, &u->as_copy);
            break;
        default:
            assert(!"unknown descriptor update");
            break;
        }

        u = u->common.pNext;
    }
}