blob: 003ec122daf76122649909821087582ec98c263f [file] [log] [blame]
Chia-I Wuf8385062015-01-04 16:27:24 +08001/*
2 * XGL
3 *
4 * Copyright (C) 2015 LunarG, Inc.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the "Software"),
8 * to deal in the Software without restriction, including without limitation
9 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 * and/or sell copies of the Software, and to permit persons to whom the
11 * Software is furnished to do so, subject to the following conditions:
12 *
13 * The above copyright notice and this permission notice shall be included
14 * in all copies or substantial portions of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 *
24 * Authors:
25 * Chia-I Wu <olv@lunarg.com>
26 */
27
28#include "buf.h"
29#include "cmd.h"
30#include "dev.h"
31#include "gpu.h"
32#include "img.h"
33#include "sampler.h"
34#include "view.h"
35#include "desc.h"
36
/* What a surface descriptor slot currently refers to; selects the union
 * member in struct intel_desc_surface.
 */
enum intel_desc_surface_type {
    INTEL_DESC_SURFACE_UNUSED,  /* slot is empty */
    INTEL_DESC_SURFACE_BUF,     /* slot holds a buffer view */
    INTEL_DESC_SURFACE_IMG,     /* slot holds an image view */
};
42
/* CPU-side state of one surface descriptor slot in the pool. */
struct intel_desc_surface {
    const struct intel_mem *mem;  /* memory backing the viewed resource */
    bool read_only;               /* true when the bound layout forbids shader writes */

    enum intel_desc_surface_type type;  /* discriminates the union below */
    union {
        const void *unused;
        const struct intel_buf_view *buf;
        const struct intel_img_view *img;
    } u;
};
54
/* CPU-side state of one sampler descriptor slot in the pool. */
struct intel_desc_sampler {
    const struct intel_sampler *sampler;  /* NULL when the slot is clear */
};
58
59static bool desc_pool_init_desc_sizes(struct intel_desc_pool *pool,
60 const struct intel_gpu *gpu)
61{
62 pool->surface_desc_size = sizeof(struct intel_desc_surface);
63 pool->sampler_desc_size = sizeof(struct intel_desc_sampler);
64
65 return true;
66}
67
68XGL_RESULT intel_desc_pool_create(struct intel_dev *dev,
69 struct intel_desc_pool **pool_ret)
70{
71 const uint32_t surface_count = 16384;
72 const uint32_t sampler_count = 16384;
73 struct intel_desc_pool *pool;
74
Chia-I Wuf9c81ef2015-02-22 13:49:15 +080075 pool = intel_alloc(dev, sizeof(*pool), 0, XGL_SYSTEM_ALLOC_INTERNAL);
Chia-I Wuf8385062015-01-04 16:27:24 +080076 if (!pool)
77 return XGL_ERROR_OUT_OF_MEMORY;
78
79 memset(pool, 0, sizeof(*pool));
80
81 if (!desc_pool_init_desc_sizes(pool, dev->gpu)) {
Chia-I Wuf9c81ef2015-02-22 13:49:15 +080082 intel_free(dev, pool);
Chia-I Wuf8385062015-01-04 16:27:24 +080083 return XGL_ERROR_UNKNOWN;
84 }
85
86 intel_desc_offset_set(&pool->size,
87 pool->surface_desc_size * surface_count,
88 pool->sampler_desc_size * sampler_count);
89
Chia-I Wuf9c81ef2015-02-22 13:49:15 +080090 pool->surfaces = intel_alloc(dev, pool->size.surface,
Chia-I Wuf8385062015-01-04 16:27:24 +080091 64, XGL_SYSTEM_ALLOC_INTERNAL);
92 if (!pool->surfaces) {
Chia-I Wuf9c81ef2015-02-22 13:49:15 +080093 intel_free(dev, pool);
Chia-I Wuf8385062015-01-04 16:27:24 +080094 return XGL_ERROR_OUT_OF_MEMORY;
95 }
96
Chia-I Wuf9c81ef2015-02-22 13:49:15 +080097 pool->samplers = intel_alloc(dev, pool->size.sampler,
Chia-I Wuf8385062015-01-04 16:27:24 +080098 64, XGL_SYSTEM_ALLOC_INTERNAL);
99 if (!pool->samplers) {
Chia-I Wuf9c81ef2015-02-22 13:49:15 +0800100 intel_free(dev, pool->surfaces);
101 intel_free(dev, pool);
Chia-I Wuf8385062015-01-04 16:27:24 +0800102 return XGL_ERROR_OUT_OF_MEMORY;
103 }
104
105 *pool_ret = pool;
106
107 return XGL_SUCCESS;
108}
109
Chia-I Wuf13ed3c2015-02-22 14:09:00 +0800110void intel_desc_pool_destroy(struct intel_dev *dev,
111 struct intel_desc_pool *pool)
Chia-I Wuf8385062015-01-04 16:27:24 +0800112{
Chia-I Wuf13ed3c2015-02-22 14:09:00 +0800113 intel_free(dev, pool->samplers);
114 intel_free(dev, pool->surfaces);
115 intel_free(dev, pool);
Chia-I Wuf8385062015-01-04 16:27:24 +0800116}
117
118/**
119 * Get the size of a descriptor in the pool.
120 */
121static XGL_RESULT desc_pool_get_desc_size(const struct intel_desc_pool *pool,
122 XGL_DESCRIPTOR_TYPE type,
123 struct intel_desc_offset *size)
124{
125 uint32_t surface_size = 0, sampler_size = 0;
126
127 switch (type) {
128 case XGL_DESCRIPTOR_TYPE_SAMPLER:
129 sampler_size = pool->sampler_desc_size;
130 break;
131 case XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE:
132 surface_size = pool->surface_desc_size;
133 sampler_size = pool->sampler_desc_size;
134 break;
135 case XGL_DESCRIPTOR_TYPE_TEXTURE:
136 case XGL_DESCRIPTOR_TYPE_TEXTURE_BUFFER:
137 case XGL_DESCRIPTOR_TYPE_IMAGE:
138 case XGL_DESCRIPTOR_TYPE_IMAGE_BUFFER:
139 case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
140 case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER:
141 case XGL_DESCRIPTOR_TYPE_RAW_BUFFER:
142 case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
143 case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER_DYNAMIC:
144 case XGL_DESCRIPTOR_TYPE_RAW_BUFFER_DYNAMIC:
145 surface_size = pool->surface_desc_size;
146 break;
147 default:
148 assert(!"unknown descriptor type");
149 return XGL_ERROR_INVALID_VALUE;
150 break;
151 }
152
153 intel_desc_offset_set(size, surface_size, sampler_size);
154
155 return XGL_SUCCESS;
156}
157
158XGL_RESULT intel_desc_pool_alloc(struct intel_desc_pool *pool,
159 const XGL_DESCRIPTOR_REGION_CREATE_INFO *info,
160 struct intel_desc_offset *begin,
161 struct intel_desc_offset *end)
162{
163 uint32_t surface_size = 0, sampler_size = 0;
164 struct intel_desc_offset alloc;
165 uint32_t i;
166
167 /* calculate sizes needed */
168 for (i = 0; i < info->count; i++) {
169 const XGL_DESCRIPTOR_TYPE_COUNT *tc = &info->pTypeCount[i];
170 struct intel_desc_offset size;
171 XGL_RESULT ret;
172
173 ret = desc_pool_get_desc_size(pool, tc->type, &size);
174 if (ret != XGL_SUCCESS)
175 return ret;
176
177 surface_size += size.surface * tc->count;
178 sampler_size += size.sampler * tc->count;
179 }
180
181 intel_desc_offset_set(&alloc, surface_size, sampler_size);
182
183 *begin = pool->cur;
184 intel_desc_offset_add(end, &pool->cur, &alloc);
185
186 if (!intel_desc_offset_within(end, &pool->size))
187 return XGL_ERROR_OUT_OF_MEMORY;
188
189 /* increment the writer pointer */
190 pool->cur = *end;
191
192 return XGL_SUCCESS;
193}
194
195static void desc_pool_validate_begin_end(const struct intel_desc_pool *pool,
196 const struct intel_desc_offset *begin,
197 const struct intel_desc_offset *end)
198{
199 assert(begin->surface % pool->surface_desc_size == 0 &&
200 begin->sampler % pool->sampler_desc_size == 0);
201 assert(end->surface % pool->surface_desc_size == 0 &&
202 end->sampler % pool->sampler_desc_size == 0);
203 assert(intel_desc_offset_within(end, &pool->size));
204}
205
/* Return a range to the pool.  The pool is a linear allocator, so freed
 * ranges are validated but never actually reclaimed.
 */
void intel_desc_pool_free(struct intel_desc_pool *pool,
                          const struct intel_desc_offset *begin,
                          const struct intel_desc_offset *end)
{
    desc_pool_validate_begin_end(pool, begin, end);

    /* is it ok not to reclaim? */
}
214
215XGL_RESULT intel_desc_pool_begin_update(struct intel_desc_pool *pool,
216 XGL_DESCRIPTOR_UPDATE_MODE mode)
217{
218 /* no-op */
219 return XGL_SUCCESS;
220}
221
222XGL_RESULT intel_desc_pool_end_update(struct intel_desc_pool *pool,
223 struct intel_cmd *cmd)
224{
225 /* No pipelined update. cmd_draw() will do the work. */
226 return XGL_SUCCESS;
227}
228
229void intel_desc_pool_clear(struct intel_desc_pool *pool,
230 const struct intel_desc_offset *begin,
231 const struct intel_desc_offset *end)
232{
233 uint32_t i;
234
235 desc_pool_validate_begin_end(pool, begin, end);
236
237 for (i = begin->surface; i < end->surface; i += pool->surface_desc_size) {
238 struct intel_desc_surface *desc = (struct intel_desc_surface *)
239 ((char *) pool->surfaces + i);
240
241 desc->mem = NULL;
242 desc->type = INTEL_DESC_SURFACE_UNUSED;
243 desc->u.unused = NULL;
244 }
245
246 for (i = begin->sampler; i < end->sampler; i += pool->sampler_desc_size) {
247 struct intel_desc_sampler *desc = (struct intel_desc_sampler *)
248 ((char *) pool->samplers + i);
249
250 desc->sampler = NULL;
251 }
252}
253
254void intel_desc_pool_update(struct intel_desc_pool *pool,
255 const struct intel_desc_offset *begin,
256 const struct intel_desc_offset *end,
257 const struct intel_desc_surface *surfaces,
258 const struct intel_desc_sampler *samplers)
259{
260 desc_pool_validate_begin_end(pool, begin, end);
261
262 if (begin->surface < end->surface) {
263 memcpy((char *) pool->surfaces + begin->surface, surfaces,
264 end->surface - begin->surface);
265 }
266
267 if (begin->sampler < end->sampler) {
268 memcpy((char *) pool->samplers + begin->sampler, samplers,
269 end->sampler - begin->sampler);
270 }
271}
272
273void intel_desc_pool_copy(struct intel_desc_pool *pool,
274 const struct intel_desc_offset *begin,
275 const struct intel_desc_offset *end,
276 const struct intel_desc_offset *src)
277{
278 struct intel_desc_offset src_end;
279 const struct intel_desc_surface *surfaces;
280 const struct intel_desc_sampler *samplers;
281
282 /* no overlap */
283 assert(intel_desc_offset_within(src, begin) ||
284 intel_desc_offset_within(end, src));
285
286 /* no read past pool */
287 intel_desc_offset_sub(&src_end, end, begin);
288 intel_desc_offset_add(&src_end, src, &src_end);
289 assert(intel_desc_offset_within(&src_end, &pool->size));
290
291 surfaces = (const struct intel_desc_surface *)
292 ((const char *) pool->surfaces + src->surface);
Tony Barbour379e0a72015-02-05 11:09:34 -0700293 samplers = (const struct intel_desc_sampler *)
Chia-I Wuf8385062015-01-04 16:27:24 +0800294 ((const char *) pool->samplers + src->sampler);
295
296 intel_desc_pool_update(pool, begin, end, surfaces, samplers);
297}
298
/* intel_obj destroy callback for descriptor regions. */
static void desc_region_destroy(struct intel_obj *obj)
{
    intel_desc_region_destroy(intel_desc_region_from_obj(obj));
}
305
306XGL_RESULT intel_desc_region_create(struct intel_dev *dev,
307 XGL_DESCRIPTOR_REGION_USAGE usage,
308 uint32_t max_sets,
309 const XGL_DESCRIPTOR_REGION_CREATE_INFO *info,
310 struct intel_desc_region **region_ret)
311{
312 struct intel_desc_region *region;
313 XGL_RESULT ret;
314
Chia-I Wu545c2e12015-02-22 13:19:54 +0800315 region = (struct intel_desc_region *) intel_base_create(&dev->base.handle,
316 sizeof(*region), dev->base.dbg, XGL_DBG_OBJECT_DESCRIPTOR_REGION,
317 info, 0);
Chia-I Wuf8385062015-01-04 16:27:24 +0800318 if (!region)
319 return XGL_ERROR_OUT_OF_MEMORY;
320
321 region->dev = dev;
322
323 ret = intel_desc_pool_alloc(dev->desc_pool, info,
324 &region->pool_begin, &region->pool_end);
325 if (ret != XGL_SUCCESS) {
326 intel_base_destroy(&region->obj.base);
327 return ret;
328 }
329
330 /* point to head */
331 region->cur = region->pool_begin;
332
333 region->obj.destroy = desc_region_destroy;
334
335 *region_ret = region;
336
337 return XGL_SUCCESS;
338}
339
340void intel_desc_region_destroy(struct intel_desc_region *region)
341{
342 intel_desc_pool_free(region->dev->desc_pool,
343 &region->pool_begin, &region->pool_end);
344 intel_base_destroy(&region->obj.base);
345}
346
347XGL_RESULT intel_desc_region_alloc(struct intel_desc_region *region,
348 const struct intel_desc_layout *layout,
349 struct intel_desc_offset *begin,
350 struct intel_desc_offset *end)
351{
352 *begin = region->cur;
353 intel_desc_offset_add(end, &region->cur, &layout->pool_size);
354
355 if (!intel_desc_offset_within(end, &region->pool_end))
356 return XGL_ERROR_OUT_OF_MEMORY;
357
358 /* increment the writer pointer */
359 region->cur = *end;
360
361 return XGL_SUCCESS;
362}
363
364void intel_desc_region_free_all(struct intel_desc_region *region)
365{
366 /* reset to head */
367 region->cur = region->pool_begin;
368}
369
/* intel_obj destroy callback for descriptor sets. */
static void desc_set_destroy(struct intel_obj *obj)
{
    intel_desc_set_destroy(intel_desc_set_from_obj(obj));
}
376
377XGL_RESULT intel_desc_set_create(struct intel_dev *dev,
378 struct intel_desc_region *region,
379 XGL_DESCRIPTOR_SET_USAGE usage,
380 const struct intel_desc_layout *layout,
381 struct intel_desc_set **set_ret)
382{
383 struct intel_desc_set *set;
384 XGL_RESULT ret;
385
Chia-I Wu545c2e12015-02-22 13:19:54 +0800386 set = (struct intel_desc_set *) intel_base_create(&dev->base.handle,
387 sizeof(*set), dev->base.dbg, XGL_DBG_OBJECT_DESCRIPTOR_SET,
388 NULL, 0);
Chia-I Wuf8385062015-01-04 16:27:24 +0800389 if (!set)
390 return XGL_ERROR_OUT_OF_MEMORY;
391
392 set->pool = dev->desc_pool;
393 ret = intel_desc_region_alloc(region, layout,
394 &set->pool_begin, &set->pool_end);
395 if (ret != XGL_SUCCESS) {
396 intel_base_destroy(&set->obj.base);
397 return ret;
398 }
399
400 set->layout = layout;
401
402 set->obj.destroy = desc_set_destroy;
403
404 *set_ret = set;
405
406 return XGL_SUCCESS;
407}
408
409void intel_desc_set_destroy(struct intel_desc_set *set)
410{
411 intel_base_destroy(&set->obj.base);
412}
413
414static void desc_set_update(struct intel_desc_set *set,
415 const struct intel_desc_layout_iter *iter,
416 const struct intel_desc_surface *surfaces,
417 const struct intel_desc_sampler *samplers)
418{
419 struct intel_desc_offset begin, end;
420
421 intel_desc_offset_add(&begin, &set->pool_begin, &iter->offset_begin);
422 intel_desc_offset_add(&end, &set->pool_begin, &iter->offset_end);
423
424 intel_desc_pool_update(set->pool, &begin, &end, surfaces, samplers);
425}
426
427static bool desc_set_img_layout_read_only(XGL_IMAGE_LAYOUT layout)
428{
429 switch (layout) {
430 case XGL_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
431 case XGL_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
432 case XGL_IMAGE_LAYOUT_TRANSFER_SOURCE_OPTIMAL:
433 return true;
434 default:
435 return false;
436 }
437}
438
439void intel_desc_set_update_samplers(struct intel_desc_set *set,
440 const XGL_UPDATE_SAMPLERS *update)
441{
442 struct intel_desc_layout_iter iter;
443 uint32_t i;
444
445 if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
446 return;
447
448 for (i = 0; i < update->count; i++) {
449 const struct intel_sampler *sampler =
450 intel_sampler((XGL_SAMPLER) update->pSamplers[i]);
451 struct intel_desc_sampler desc;
452
453 if (iter.type != XGL_DESCRIPTOR_TYPE_SAMPLER)
454 break;
455
456 desc.sampler = sampler;
457 desc_set_update(set, &iter, NULL, &desc);
458
459 if (!intel_desc_layout_advance_iter(set->layout, &iter))
460 break;
461 }
462}
463
464void intel_desc_set_update_sampler_textures(struct intel_desc_set *set,
465 const XGL_UPDATE_SAMPLER_TEXTURES *update)
466{
467 struct intel_desc_layout_iter iter;
468 const struct intel_sampler *immutable_sampler = NULL;
469 uint32_t i;
470
471 if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
472 return;
473
474 for (i = 0; i < update->count; i++) {
475 const struct intel_sampler *sampler =
476 intel_sampler(update->pSamplerImageViews[i].pSampler);
477 const XGL_IMAGE_VIEW_ATTACH_INFO *info =
478 update->pSamplerImageViews[i].pImageView;
479 const struct intel_img_view *view = intel_img_view(info->view);
480 struct intel_desc_surface view_desc;
481 struct intel_desc_sampler sampler_desc;
482
483 if (iter.type != XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE)
484 return;
485
486 /* update every immutable sampler once */
487 if (immutable_sampler != iter.range->immutable_sampler) {
488 immutable_sampler = iter.range->immutable_sampler;
489
490 if (immutable_sampler) {
491 struct intel_desc_offset begin, end;
492
493 intel_desc_offset_add(&begin, &set->pool_begin,
494 &iter.offset_begin);
495 intel_desc_offset_set(&end, begin.surface,
496 begin.sampler + set->pool->sampler_desc_size);
497
498 sampler_desc.sampler = immutable_sampler;
499 intel_desc_pool_update(set->pool, &begin, &end,
500 NULL, &sampler_desc);
501 }
502 }
503
504 view_desc.mem = view->img->obj.mem;
505 view_desc.read_only = desc_set_img_layout_read_only(info->layout);
506 view_desc.type = INTEL_DESC_SURFACE_IMG;
507 view_desc.u.img = view;
508
509 sampler_desc.sampler = sampler;
510
511 desc_set_update(set, &iter, &view_desc, &sampler_desc);
512
513 if (!intel_desc_layout_advance_iter(set->layout, &iter))
514 break;
515 }
516}
517
518void intel_desc_set_update_images(struct intel_desc_set *set,
519 const XGL_UPDATE_IMAGES *update)
520{
521 struct intel_desc_layout_iter iter;
522 uint32_t i;
523
524 if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
525 return;
526
527 for (i = 0; i < update->count; i++) {
528 const XGL_IMAGE_VIEW_ATTACH_INFO *info = update->pImageViews[i];
529 const struct intel_img_view *view = intel_img_view(info->view);
530 struct intel_desc_surface desc;
531
532 if (iter.type != update->descriptorType)
533 break;
534
535 desc.mem = view->img->obj.mem;
536 desc.read_only = desc_set_img_layout_read_only(info->layout);
537 desc.type = INTEL_DESC_SURFACE_IMG;
538 desc.u.img = view;
539 desc_set_update(set, &iter, &desc, NULL);
540
541 if (!intel_desc_layout_advance_iter(set->layout, &iter))
542 break;
543 }
544}
545
546void intel_desc_set_update_buffers(struct intel_desc_set *set,
547 const XGL_UPDATE_BUFFERS *update)
548{
549 struct intel_desc_layout_iter iter;
550 uint32_t i;
551
552 if (!intel_desc_layout_find_index(set->layout, update->index, &iter))
553 return;
554
555 for (i = 0; i < update->count; i++) {
556 const XGL_BUFFER_VIEW_ATTACH_INFO *info = update->pBufferViews[i];
557 const struct intel_buf_view *view = intel_buf_view(info->view);
558 struct intel_desc_surface desc;
559
560 if (iter.type != update->descriptorType)
561 break;
562
563 desc.mem = view->buf->obj.mem;
564 desc.read_only = false;
565 desc.type = INTEL_DESC_SURFACE_BUF;
566 desc.u.buf = view;
567 desc_set_update(set, &iter, &desc, NULL);
568
569 if (!intel_desc_layout_advance_iter(set->layout, &iter))
570 break;
571 }
572}
573
574void intel_desc_set_update_as_copy(struct intel_desc_set *set,
575 const XGL_UPDATE_AS_COPY *update)
576{
577 const struct intel_desc_set *src_set =
578 intel_desc_set(update->descriptorSet);
579 struct intel_desc_layout_iter iter, src_iter;
580 struct intel_desc_offset begin, end, src_begin;
581 uint32_t i;
582
583 /* disallow combined sampler textures */
584 if (update->descriptorType == XGL_DESCRIPTOR_TYPE_SAMPLER_TEXTURE)
585 return;
586
587 /* no update->index? */
588 if (!intel_desc_layout_find_index(set->layout, 0, &iter))
589 return;
590 if (!intel_desc_layout_find_index(src_set->layout,
591 update->descriptorIndex, &src_iter))
592 return;
593
594 intel_desc_offset_add(&begin, &set->pool_begin, &iter.offset_begin);
595 intel_desc_offset_add(&src_begin, &src_set->pool_begin,
596 &src_iter.offset_end);
597
598 /* advance to end */
599 for (i = 0; i < update->count; i++) {
600 if (iter.type != update->descriptorType ||
601 src_iter.type != update->descriptorType ||
602 !intel_desc_layout_advance_iter(set->layout, &iter) ||
603 !intel_desc_layout_advance_iter(src_set->layout, &src_iter))
604 break;
605 }
606 if (i < update->count)
607 return;
608
609 intel_desc_offset_add(&end, &src_set->pool_begin, &iter.offset_end);
610
611 intel_desc_pool_copy(src_set->pool, &begin, &end, &src_begin);
612}
613
Chia-I Wu2f0cba82015-02-12 10:15:42 -0700614static void desc_set_read(const struct intel_desc_set *set,
615 const struct intel_desc_offset *offset,
616 const struct intel_desc_surface **surface,
617 const struct intel_desc_sampler **sampler)
618{
619 struct intel_desc_offset begin, end;
620
621 intel_desc_offset_add(&begin, &set->pool_begin, offset);
622 intel_desc_offset_set(&end, 0, 0);
623
624 if (surface) {
625 *surface = (const struct intel_desc_surface *)
626 ((const char *) set->pool->surfaces + begin.surface);
627
628 end.surface = set->pool->surface_desc_size;
629 }
630
631 if (sampler) {
632 *sampler = (const struct intel_desc_sampler *)
633 ((const char *) set->pool->samplers + begin.sampler);
634
635 end.sampler = set->pool->sampler_desc_size;
636 }
637
638 intel_desc_offset_add(&end, &begin, &end);
639 desc_pool_validate_begin_end(set->pool, &begin, &end);
640}
641
642void intel_desc_set_read_surface(const struct intel_desc_set *set,
643 const struct intel_desc_offset *offset,
644 XGL_PIPELINE_SHADER_STAGE stage,
645 const struct intel_mem **mem,
646 bool *read_only,
647 const uint32_t **cmd,
648 uint32_t *cmd_len)
649{
650 const struct intel_desc_surface *desc;
651
652 desc_set_read(set, offset, &desc, NULL);
653
654 *mem = desc->mem;
655 *read_only = desc->read_only;
656 switch (desc->type) {
657 case INTEL_DESC_SURFACE_BUF:
658 *cmd = (stage == XGL_SHADER_STAGE_FRAGMENT) ?
659 desc->u.buf->fs_cmd : desc->u.buf->cmd;
660 *cmd_len = desc->u.buf->cmd_len;
661 break;
662 case INTEL_DESC_SURFACE_IMG:
663 *cmd = desc->u.img->cmd;
664 *cmd_len = desc->u.img->cmd_len;
665 break;
666 case INTEL_DESC_SURFACE_UNUSED:
667 default:
668 *cmd = NULL;
669 *cmd_len = 0;
670 break;
671 }
672}
673
674void intel_desc_set_read_sampler(const struct intel_desc_set *set,
675 const struct intel_desc_offset *offset,
676 const struct intel_sampler **sampler)
677{
678 const struct intel_desc_sampler *desc;
679
680 desc_set_read(set, offset, NULL, &desc);
681
682 *sampler = desc->sampler;
683}
684
/* intel_obj destroy callback for descriptor set layouts. */
static void desc_layout_destroy(struct intel_obj *obj)
{
    intel_desc_layout_destroy(intel_desc_layout_from_obj(obj));
}
691
692static XGL_RESULT desc_layout_alloc_ranges(struct intel_desc_layout *layout,
693 const struct intel_desc_pool *pool,
694 const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info)
695{
696 /* calculate counts */
697 while (info) {
698 if (info->sType !=
699 XGL_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO)
700 return XGL_ERROR_INVALID_POINTER;
701
702 switch (info->descriptorType) {
703 case XGL_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
704 case XGL_DESCRIPTOR_TYPE_SHADER_STORAGE_BUFFER_DYNAMIC:
705 case XGL_DESCRIPTOR_TYPE_RAW_BUFFER_DYNAMIC:
706 layout->dynamic_desc_count += info->count;
707 break;
708 default:
709 break;
710 }
711
712 layout->range_count++;
713 info = info->pNext;
714 }
715
Chia-I Wuf9c81ef2015-02-22 13:49:15 +0800716 layout->ranges = intel_alloc(layout, sizeof(layout->ranges[0]) *
Chia-I Wuf8385062015-01-04 16:27:24 +0800717 layout->range_count, 0, XGL_SYSTEM_ALLOC_INTERNAL);
718 if (!layout->ranges)
719 return XGL_ERROR_OUT_OF_MEMORY;
720
721 memset(layout->ranges, 0, sizeof(layout->ranges[0]) *
722 layout->range_count);
723
724 return XGL_SUCCESS;
725}
726
727static XGL_RESULT desc_layout_init_ranges(struct intel_desc_layout *layout,
728 const struct intel_desc_pool *pool,
729 const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info)
730{
731 struct intel_desc_offset offset;
732 uint32_t index, i;
733 XGL_RESULT ret;
734
735 ret = desc_layout_alloc_ranges(layout, pool, info);
736 if (ret != XGL_SUCCESS)
737 return ret;
738
739 if (layout->prior_layout) {
740 index = layout->prior_layout->end;
741 offset = layout->prior_layout->pool_size;
742 } else {
743 index = 0;
744 intel_desc_offset_set(&offset, 0, 0);
745 }
746
747 layout->begin = index;
748
749 /* initialize the ranges */
750 for (i = 0; i < layout->range_count; i++, info = info->pNext) {
751 struct intel_desc_layout_range *range = &layout->ranges[i];
752 struct intel_desc_offset size;
753
754 ret = desc_pool_get_desc_size(pool, info->descriptorType, &size);
755 if (ret != XGL_SUCCESS)
756 return ret;
757
758 /* info->stageFlags does not gain us anything */
759 range->type = info->descriptorType;
760
761 range->begin = index;
762 range->offset = offset;
763
764 if (info->immutableSampler != XGL_NULL_HANDLE) {
765 range->immutable_sampler = intel_sampler(info->immutableSampler);
766 /* do not increment sampler offset */
767 intel_desc_offset_set(&range->increment, size.surface, 0);
768 } else {
769 range->immutable_sampler = NULL;
770 range->increment = size;
771 }
772
773 /* increment index and offset */
774 index += info->count;
775 intel_desc_offset_mad(&size, &range->increment, &size,
776 info->count - 1);
777 intel_desc_offset_add(&offset, &offset, &size);
778
779 range->end = index;
780 }
781
782 layout->end = index;
783 layout->pool_size = offset;
784
785 return XGL_SUCCESS;
786}
787
788static XGL_RESULT desc_layout_init_bind_points(struct intel_desc_layout *layout,
789 XGL_FLAGS stage_flags,
790 const uint32_t *bind_points)
791{
792 if (!bind_points)
793 return XGL_ERROR_INVALID_POINTER;
794
795 layout->stage_flags = stage_flags;
796
797 if (stage_flags == XGL_SHADER_STAGE_FLAGS_ALL) {
798 layout->bind_point_vs = *bind_points;
799 layout->bind_point_tcs = *bind_points;
800 layout->bind_point_tes = *bind_points;
801 layout->bind_point_gs = *bind_points;
802 layout->bind_point_fs = *bind_points;
803 layout->bind_point_cs = *bind_points;
804 } else {
805 if (stage_flags & XGL_SHADER_STAGE_FLAGS_VERTEX_BIT)
806 layout->bind_point_vs = *bind_points++;
807 if (stage_flags & XGL_SHADER_STAGE_FLAGS_TESS_CONTROL_BIT)
808 layout->bind_point_tcs = *bind_points++;
809 if (stage_flags & XGL_SHADER_STAGE_FLAGS_TESS_EVALUATION_BIT)
810 layout->bind_point_tes = *bind_points++;
811 if (stage_flags & XGL_SHADER_STAGE_FLAGS_GEOMETRY_BIT)
812 layout->bind_point_gs = *bind_points++;
813 if (stage_flags & XGL_SHADER_STAGE_FLAGS_FRAGMENT_BIT)
814 layout->bind_point_fs = *bind_points++;
815 if (stage_flags & XGL_SHADER_STAGE_FLAGS_COMPUTE_BIT)
816 layout->bind_point_cs = *bind_points++;
817 }
818
819 return XGL_SUCCESS;
820}
821
822XGL_RESULT intel_desc_layout_create(struct intel_dev *dev,
823 XGL_FLAGS stage_flags,
824 const uint32_t *bind_points,
825 const struct intel_desc_layout *prior_layout,
826 const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO *info,
827 struct intel_desc_layout **layout_ret)
828{
829 struct intel_desc_layout *layout;
830 XGL_RESULT ret;
831
Chia-I Wu545c2e12015-02-22 13:19:54 +0800832 layout = (struct intel_desc_layout *) intel_base_create(&dev->base.handle,
833 sizeof(*layout), dev->base.dbg,
834 XGL_DBG_OBJECT_DESCRIPTOR_SET_LAYOUT, info, 0);
Chia-I Wuf8385062015-01-04 16:27:24 +0800835 if (!layout)
836 return XGL_ERROR_OUT_OF_MEMORY;
837
838 layout->prior_layout = prior_layout;
839
840 ret = desc_layout_init_bind_points(layout, stage_flags, bind_points);
841 if (ret == XGL_SUCCESS)
842 ret = desc_layout_init_ranges(layout, dev->desc_pool, info);
843 if (ret != XGL_SUCCESS) {
844 intel_desc_layout_destroy(layout);
845 return ret;
846 }
847
848 layout->obj.destroy = desc_layout_destroy;
849
850 *layout_ret = layout;
851
852 return XGL_SUCCESS;
853}
854
855void intel_desc_layout_destroy(struct intel_desc_layout *layout)
856{
Chia-I Wuf9c81ef2015-02-22 13:49:15 +0800857 intel_free(layout, layout->ranges);
Chia-I Wuf8385062015-01-04 16:27:24 +0800858 intel_base_destroy(&layout->obj.base);
859}
860
861static void desc_layout_init_iter(const struct intel_desc_layout *sublayout,
862 uint32_t index,
863 struct intel_desc_layout_iter *iter)
864{
865 const struct intel_desc_layout_range *range;
866
867 assert(index >= sublayout->begin && index < sublayout->end);
868
869 /* find the range the index is in */
870 for (range = sublayout->ranges;; range++) {
871 assert(range < sublayout->ranges + sublayout->range_count);
872 if (index < range->end)
873 break;
874 }
875
876 /* current position */
877 iter->sublayout = sublayout;
878 iter->range = range;
879 iter->index = index;
880
881 iter->type = iter->range->type;
882 intel_desc_offset_mad(&iter->offset_begin, &range->increment,
883 &range->offset, iter->index - range->begin);
884 intel_desc_offset_add(&iter->offset_end, &iter->offset_begin,
885 &range->increment);
886}
887
888bool intel_desc_layout_find_bind_point(const struct intel_desc_layout *layout,
889 XGL_PIPELINE_SHADER_STAGE stage,
890 uint32_t set, uint32_t binding,
891 struct intel_desc_layout_iter *iter)
892{
893 /* find the layout at the bind point */
894 switch (stage) {
895#define CASE(stage, s) \
896 case XGL_SHADER_STAGE_ ##stage: \
897 while (layout) { \
898 if ((layout->stage_flags & \
899 XGL_SHADER_STAGE_FLAGS_ ##stage## _BIT) && \
900 layout->bind_point_ ##s == set) \
901 break; \
902 layout = layout->prior_layout; \
903 } \
904 break
905 CASE(VERTEX, vs);
906 CASE(TESS_CONTROL, tcs);
907 CASE(TESS_EVALUATION, tes);
908 CASE(GEOMETRY, gs);
909 CASE(FRAGMENT, fs);
910 CASE(COMPUTE, cs);
911#undef CASE
912 default:
913 assert(!"unknown shader stage");
914 layout = NULL;
915 break;
916 }
917
918 if (!layout || layout->begin + binding >= layout->end) {
919 memset(iter, 0, sizeof(*iter));
920 return false;
921 }
922
923 desc_layout_init_iter(layout, layout->begin + binding, iter);
924
925 return true;
926}
927
928bool intel_desc_layout_find_index(const struct intel_desc_layout *layout,
929 uint32_t index,
930 struct intel_desc_layout_iter *iter)
931{
932 if (index >= layout->begin) {
933 /* out of bound */
934 if (index >= layout->end)
935 layout = NULL;
936 } else {
937 while (true) {
938 layout = layout->prior_layout;
939 if (index >= layout->begin) {
940 assert(index < layout->end);
941 break;
942 }
943 }
944 }
945
946 if (!layout) {
947 memset(iter, 0, sizeof(*iter));
948 return false;
949 }
950
951 desc_layout_init_iter(layout, index, iter);
952
953 return true;
954}
955
956bool intel_desc_layout_advance_iter(const struct intel_desc_layout *layout,
957 struct intel_desc_layout_iter *iter)
958{
959 /* all descriptors traversed */
960 if (!iter->sublayout)
961 return false;
962
963 iter->index++;
964 if (iter->index >= iter->range->end) {
965 /* move to the next range */
966 iter->range++;
967
968 if (iter->range >= iter->sublayout->ranges +
969 iter->sublayout->range_count) {
970 /* find again as the chain is not doubly-linked */
971 const bool ret = intel_desc_layout_find_index(layout,
972 iter->index, iter);
973 if (!ret) {
974 iter->sublayout = NULL;
975 iter->range = NULL;
976 }
977
978 return ret;
979 }
980
981 iter->type = iter->range->type;
982 }
983
984 iter->offset_begin = iter->offset_end;
985 intel_desc_offset_add(&iter->offset_end, &iter->offset_end,
986 &iter->range->increment);
987
988 return true;
989}
990
Chia-I Wude26bdf2015-02-18 15:47:12 -0700991ICD_EXPORT XGL_RESULT XGLAPI xglCreateDescriptorSetLayout(
Chia-I Wuf8385062015-01-04 16:27:24 +0800992 XGL_DEVICE device,
993 XGL_FLAGS stageFlags,
Mark Lobodzinskie2d07a52015-01-29 08:55:56 -0600994 const uint32_t* pSetBindPoints,
Chia-I Wuf8385062015-01-04 16:27:24 +0800995 XGL_DESCRIPTOR_SET_LAYOUT priorSetLayout,
996 const XGL_DESCRIPTOR_SET_LAYOUT_CREATE_INFO* pSetLayoutInfoList,
997 XGL_DESCRIPTOR_SET_LAYOUT* pSetLayout)
998{
999 struct intel_dev *dev = intel_dev(device);
1000 struct intel_desc_layout *prior_layout = intel_desc_layout(priorSetLayout);
1001
1002 return intel_desc_layout_create(dev, stageFlags, pSetBindPoints,
1003 prior_layout, pSetLayoutInfoList,
1004 (struct intel_desc_layout **) pSetLayout);
1005}
1006
Chia-I Wude26bdf2015-02-18 15:47:12 -07001007ICD_EXPORT XGL_RESULT XGLAPI xglBeginDescriptorRegionUpdate(
Chia-I Wuf8385062015-01-04 16:27:24 +08001008 XGL_DEVICE device,
1009 XGL_DESCRIPTOR_UPDATE_MODE updateMode)
1010{
1011 struct intel_dev *dev = intel_dev(device);
1012 struct intel_desc_pool *pool = dev->desc_pool;
1013
1014 return intel_desc_pool_begin_update(pool, updateMode);
1015}
1016
Chia-I Wude26bdf2015-02-18 15:47:12 -07001017ICD_EXPORT XGL_RESULT XGLAPI xglEndDescriptorRegionUpdate(
Chia-I Wuf8385062015-01-04 16:27:24 +08001018 XGL_DEVICE device,
1019 XGL_CMD_BUFFER cmd_)
1020{
1021 struct intel_dev *dev = intel_dev(device);
1022 struct intel_desc_pool *pool = dev->desc_pool;
1023 struct intel_cmd *cmd = intel_cmd(cmd_);
1024
1025 return intel_desc_pool_end_update(pool, cmd);
1026}
1027
Chia-I Wude26bdf2015-02-18 15:47:12 -07001028ICD_EXPORT XGL_RESULT XGLAPI xglCreateDescriptorRegion(
Chia-I Wuf8385062015-01-04 16:27:24 +08001029 XGL_DEVICE device,
1030 XGL_DESCRIPTOR_REGION_USAGE regionUsage,
Mark Lobodzinskie2d07a52015-01-29 08:55:56 -06001031 uint32_t maxSets,
Chia-I Wuf8385062015-01-04 16:27:24 +08001032 const XGL_DESCRIPTOR_REGION_CREATE_INFO* pCreateInfo,
1033 XGL_DESCRIPTOR_REGION* pDescriptorRegion)
1034{
1035 struct intel_dev *dev = intel_dev(device);
1036
1037 return intel_desc_region_create(dev, regionUsage, maxSets, pCreateInfo,
1038 (struct intel_desc_region **) pDescriptorRegion);
1039}
1040
Chia-I Wude26bdf2015-02-18 15:47:12 -07001041ICD_EXPORT XGL_RESULT XGLAPI xglClearDescriptorRegion(
Chia-I Wuf8385062015-01-04 16:27:24 +08001042 XGL_DESCRIPTOR_REGION descriptorRegion)
1043{
1044 struct intel_desc_region *region = intel_desc_region(descriptorRegion);
1045
1046 intel_desc_region_free_all(region);
1047
1048 return XGL_SUCCESS;
1049}
1050
Chia-I Wude26bdf2015-02-18 15:47:12 -07001051ICD_EXPORT XGL_RESULT XGLAPI xglAllocDescriptorSets(
Chia-I Wuf8385062015-01-04 16:27:24 +08001052 XGL_DESCRIPTOR_REGION descriptorRegion,
1053 XGL_DESCRIPTOR_SET_USAGE setUsage,
Mark Lobodzinskie2d07a52015-01-29 08:55:56 -06001054 uint32_t count,
Chia-I Wuf8385062015-01-04 16:27:24 +08001055 const XGL_DESCRIPTOR_SET_LAYOUT* pSetLayouts,
1056 XGL_DESCRIPTOR_SET* pDescriptorSets,
Mark Lobodzinskie2d07a52015-01-29 08:55:56 -06001057 uint32_t* pCount)
Chia-I Wuf8385062015-01-04 16:27:24 +08001058{
1059 struct intel_desc_region *region = intel_desc_region(descriptorRegion);
1060 struct intel_dev *dev = region->dev;
1061 XGL_RESULT ret = XGL_SUCCESS;
1062 uint32_t i;
1063
1064 for (i = 0; i < count; i++) {
1065 const struct intel_desc_layout *layout =
1066 intel_desc_layout((XGL_DESCRIPTOR_SET_LAYOUT) pSetLayouts[i]);
1067
1068 ret = intel_desc_set_create(dev, region, setUsage, layout,
1069 (struct intel_desc_set **) &pDescriptorSets[i]);
1070 if (ret != XGL_SUCCESS)
1071 break;
1072 }
1073
1074 if (pCount)
1075 *pCount = i;
1076
1077 return ret;
1078}
1079
Chia-I Wude26bdf2015-02-18 15:47:12 -07001080ICD_EXPORT void XGLAPI xglClearDescriptorSets(
Chia-I Wuf8385062015-01-04 16:27:24 +08001081 XGL_DESCRIPTOR_REGION descriptorRegion,
Mark Lobodzinskie2d07a52015-01-29 08:55:56 -06001082 uint32_t count,
Chia-I Wuf8385062015-01-04 16:27:24 +08001083 const XGL_DESCRIPTOR_SET* pDescriptorSets)
1084{
1085 uint32_t i;
1086
1087 for (i = 0; i < count; i++) {
1088 struct intel_desc_set *set =
1089 intel_desc_set((XGL_DESCRIPTOR_SET) pDescriptorSets[i]);
1090
1091 intel_desc_pool_clear(set->pool, &set->pool_begin, &set->pool_end);
1092 }
1093}
1094
Chia-I Wude26bdf2015-02-18 15:47:12 -07001095ICD_EXPORT void XGLAPI xglUpdateDescriptors(
Chia-I Wuf8385062015-01-04 16:27:24 +08001096 XGL_DESCRIPTOR_SET descriptorSet,
Mark Lobodzinskie2d07a52015-01-29 08:55:56 -06001097 const void* pUpdateChain)
Chia-I Wuf8385062015-01-04 16:27:24 +08001098{
1099 struct intel_desc_set *set = intel_desc_set(descriptorSet);
1100 const union {
1101 struct {
1102 XGL_STRUCTURE_TYPE sType;
Mark Lobodzinskie2d07a52015-01-29 08:55:56 -06001103 const void* pNext;
Chia-I Wuf8385062015-01-04 16:27:24 +08001104 } common;
1105
1106 XGL_UPDATE_SAMPLERS samplers;
1107 XGL_UPDATE_SAMPLER_TEXTURES sampler_textures;
1108 XGL_UPDATE_IMAGES images;
1109 XGL_UPDATE_BUFFERS buffers;
1110 XGL_UPDATE_AS_COPY as_copy;
1111 } *u = pUpdateChain;
1112
1113 while (u) {
1114 switch (u->common.sType) {
1115 case XGL_STRUCTURE_TYPE_UPDATE_SAMPLERS:
1116 intel_desc_set_update_samplers(set, &u->samplers);
1117 break;
1118 case XGL_STRUCTURE_TYPE_UPDATE_SAMPLER_TEXTURES:
1119 intel_desc_set_update_sampler_textures(set, &u->sampler_textures);
1120 break;
1121 case XGL_STRUCTURE_TYPE_UPDATE_IMAGES:
1122 intel_desc_set_update_images(set, &u->images);
1123 break;
1124 case XGL_STRUCTURE_TYPE_UPDATE_BUFFERS:
1125 intel_desc_set_update_buffers(set, &u->buffers);
1126 break;
1127 case XGL_STRUCTURE_TYPE_UPDATE_AS_COPY:
1128 intel_desc_set_update_as_copy(set, &u->as_copy);
1129 break;
1130 default:
1131 assert(!"unknown descriptor update");
1132 break;
1133 }
1134
1135 u = u->common.pNext;
1136 }
1137}