/*
 * Copyright (C) 2018 Alyssa Rosenzweig
 * Copyright (C) 2020 Collabora Ltd.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include "util/macros.h"
#include "util/u_prim.h"
#include "util/u_vbuf.h"

#include "panfrost-quirks.h"

#include "pan_pool.h"
#include "pan_bo.h"
#include "pan_cmdstream.h"
#include "pan_context.h"
#include "pan_job.h"

/* If a BO is accessed for a particular shader stage, will it be in the primary
 * batch (vertex/tiler) or the secondary batch (fragment)? Anything but
 * fragment will be primary, e.g. compute jobs will be considered
 * "vertex/tiler" by analogy */

static inline uint32_t
panfrost_bo_access_for_stage(enum pipe_shader_type stage)
{
        assert(stage == PIPE_SHADER_FRAGMENT ||
               stage == PIPE_SHADER_VERTEX ||
               stage == PIPE_SHADER_COMPUTE);

        return stage == PIPE_SHADER_FRAGMENT ?
               PAN_BO_ACCESS_FRAGMENT :
               PAN_BO_ACCESS_VERTEX_TILER;
}
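
/* Illustrative usage sketch (not part of the original file): a hypothetical
 * caller attaching a storage buffer for a compute job would write
 *
 *     panfrost_batch_add_bo(batch, bo,
 *                           PAN_BO_ACCESS_SHARED | PAN_BO_ACCESS_RW |
 *                           panfrost_bo_access_for_stage(PIPE_SHADER_COMPUTE));
 *
 * and the access resolves to PAN_BO_ACCESS_VERTEX_TILER, i.e. the primary
 * batch, per the comment above. */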

static void
panfrost_vt_emit_shared_memory(struct panfrost_context *ctx,
                               struct mali_vertex_tiler_postfix *postfix)
{
        struct panfrost_device *dev = pan_device(ctx->base.screen);
        struct panfrost_batch *batch = panfrost_get_batch_for_fbo(ctx);

        unsigned shift = panfrost_get_stack_shift(batch->stack_size);
        struct mali_shared_memory shared = {
                .stack_shift = shift,
                .scratchpad = panfrost_batch_get_scratchpad(batch, shift, dev->thread_tls_alloc, dev->core_count)->gpu,
                .shared_workgroup_count = ~0,
        };
        postfix->shared_memory = panfrost_pool_upload(&batch->pool, &shared, sizeof(shared));
}

static void
panfrost_vt_attach_framebuffer(struct panfrost_context *ctx,
                               struct mali_vertex_tiler_postfix *postfix)
{
        struct panfrost_batch *batch = panfrost_get_batch_for_fbo(ctx);
        postfix->shared_memory = panfrost_batch_reserve_framebuffer(batch);
}

static void
panfrost_vt_update_rasterizer(struct panfrost_context *ctx,
                              struct mali_vertex_tiler_prefix *prefix,
                              struct mali_vertex_tiler_postfix *postfix)
{
        struct panfrost_rasterizer *rasterizer = ctx->rasterizer;

        postfix->gl_enables |= 0x7;
        SET_BIT(postfix->gl_enables, MALI_FRONT_CCW_TOP,
                rasterizer && rasterizer->base.front_ccw);
        SET_BIT(postfix->gl_enables, MALI_CULL_FACE_FRONT,
                rasterizer && (rasterizer->base.cull_face & PIPE_FACE_FRONT));
        SET_BIT(postfix->gl_enables, MALI_CULL_FACE_BACK,
                rasterizer && (rasterizer->base.cull_face & PIPE_FACE_BACK));
        SET_BIT(prefix->unknown_draw, MALI_DRAW_FLATSHADE_FIRST,
                rasterizer && rasterizer->base.flatshade_first);
}

void
panfrost_vt_update_primitive_size(struct panfrost_context *ctx,
                                  struct mali_vertex_tiler_prefix *prefix,
                                  union midgard_primitive_size *primitive_size)
{
        struct panfrost_rasterizer *rasterizer = ctx->rasterizer;

        if (!panfrost_writes_point_size(ctx)) {
                bool points = prefix->draw_mode == MALI_DRAW_MODE_POINTS;
                float val = 0.0f;

                if (rasterizer)
                        val = points ?
                              rasterizer->base.point_size :
                              rasterizer->base.line_width;

                primitive_size->constant = val;
        }
}

static void
panfrost_vt_update_occlusion_query(struct panfrost_context *ctx,
                                   struct mali_vertex_tiler_postfix *postfix)
{
        SET_BIT(postfix->gl_enables, MALI_OCCLUSION_QUERY, ctx->occlusion_query);
        if (ctx->occlusion_query) {
                postfix->occlusion_counter = ctx->occlusion_query->bo->gpu;
                panfrost_batch_add_bo(ctx->batch, ctx->occlusion_query->bo,
                                      PAN_BO_ACCESS_SHARED |
                                      PAN_BO_ACCESS_RW |
                                      PAN_BO_ACCESS_FRAGMENT);
        } else {
                postfix->occlusion_counter = 0;
        }
}

void
panfrost_vt_init(struct panfrost_context *ctx,
                 enum pipe_shader_type stage,
                 struct mali_vertex_tiler_prefix *prefix,
                 struct mali_vertex_tiler_postfix *postfix)
{
        struct panfrost_device *device = pan_device(ctx->base.screen);

        if (!ctx->shader[stage])
                return;

        memset(prefix, 0, sizeof(*prefix));
        memset(postfix, 0, sizeof(*postfix));

        if (device->quirks & IS_BIFROST) {
                postfix->gl_enables = 0x2;
                panfrost_vt_emit_shared_memory(ctx, postfix);
        } else {
                postfix->gl_enables = 0x6;
                panfrost_vt_attach_framebuffer(ctx, postfix);
        }

        if (stage == PIPE_SHADER_FRAGMENT) {
                panfrost_vt_update_occlusion_query(ctx, postfix);
                panfrost_vt_update_rasterizer(ctx, prefix, postfix);
        }
}

static unsigned
panfrost_translate_index_size(unsigned size)
{
        switch (size) {
        case 1:
                return MALI_DRAW_INDEXED_UINT8;

        case 2:
                return MALI_DRAW_INDEXED_UINT16;

        case 4:
                return MALI_DRAW_INDEXED_UINT32;

        default:
                unreachable("Invalid index size");
        }
}

/* Gets a GPU address for the associated index buffer. Only guaranteed to be
 * good for the duration of the draw (transient), could last longer. Also get
 * the bounds on the index buffer for the range accessed by the draw. We do
 * these operations together because there are natural optimizations which
 * require them to be together. */

static mali_ptr
panfrost_get_index_buffer_bounded(struct panfrost_context *ctx,
                                  const struct pipe_draw_info *info,
                                  unsigned *min_index, unsigned *max_index)
{
        struct panfrost_resource *rsrc = pan_resource(info->index.resource);
        struct panfrost_batch *batch = panfrost_get_batch_for_fbo(ctx);
        off_t offset = info->start * info->index_size;
        bool needs_indices = true;
        mali_ptr out = 0;

        if (info->max_index != ~0u) {
                *min_index = info->min_index;
                *max_index = info->max_index;
                needs_indices = false;
        }

        if (!info->has_user_indices) {
                /* Only resources can be directly mapped */
                panfrost_batch_add_bo(batch, rsrc->bo,
                                      PAN_BO_ACCESS_SHARED |
                                      PAN_BO_ACCESS_READ |
                                      PAN_BO_ACCESS_VERTEX_TILER);
                out = rsrc->bo->gpu + offset;

                /* Check the cache */
                needs_indices = !panfrost_minmax_cache_get(rsrc->index_cache,
                                                           info->start,
                                                           info->count,
                                                           min_index,
                                                           max_index);
        } else {
                /* Otherwise, we need to upload to transient memory */
                const uint8_t *ibuf8 = (const uint8_t *) info->index.user;
                out = panfrost_pool_upload(&batch->pool, ibuf8 + offset,
                                           info->count *
                                           info->index_size);
        }

        if (needs_indices) {
                /* Fallback */
                u_vbuf_get_minmax_index(&ctx->base, info, min_index, max_index);

                if (!info->has_user_indices)
                        panfrost_minmax_cache_add(rsrc->index_cache,
                                                  info->start, info->count,
                                                  *min_index, *max_index);
        }

        return out;
}
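
/* Worked example (illustrative, not from the original file): an indexed draw
 * with info->start = 0 and info->count = 3 over index data { 5, 9, 7 } yields
 * *min_index = 5 and *max_index = 9. Scanning indices on the CPU is costly,
 * which is why the result is memoized in rsrc->index_cache keyed on
 * (start, count) whenever the indices live in a resource rather than in user
 * memory. */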

void
panfrost_vt_set_draw_info(struct panfrost_context *ctx,
                          const struct pipe_draw_info *info,
                          enum mali_draw_mode draw_mode,
                          struct mali_vertex_tiler_postfix *vertex_postfix,
                          struct mali_vertex_tiler_prefix *tiler_prefix,
                          struct mali_vertex_tiler_postfix *tiler_postfix,
                          unsigned *vertex_count,
                          unsigned *padded_count)
{
        tiler_prefix->draw_mode = draw_mode;

        unsigned draw_flags = 0;

        if (panfrost_writes_point_size(ctx))
                draw_flags |= MALI_DRAW_VARYING_SIZE;

        if (info->primitive_restart)
                draw_flags |= MALI_DRAW_PRIMITIVE_RESTART_FIXED_INDEX;

        /* These don't make much sense */

        draw_flags |= 0x3000;

        if (info->index_size) {
                unsigned min_index = 0, max_index = 0;

                tiler_prefix->indices = panfrost_get_index_buffer_bounded(ctx,
                                                                          info,
                                                                          &min_index,
                                                                          &max_index);

                /* Use the corresponding values */
                *vertex_count = max_index - min_index + 1;
                tiler_postfix->offset_start = vertex_postfix->offset_start = min_index + info->index_bias;
                tiler_prefix->offset_bias_correction = -min_index;
                tiler_prefix->index_count = MALI_POSITIVE(info->count);
                draw_flags |= panfrost_translate_index_size(info->index_size);
        } else {
                tiler_prefix->indices = 0;
                *vertex_count = ctx->vertex_count;
                tiler_postfix->offset_start = vertex_postfix->offset_start = info->start;
                tiler_prefix->offset_bias_correction = 0;
                tiler_prefix->index_count = MALI_POSITIVE(ctx->vertex_count);
        }

        tiler_prefix->unknown_draw = draw_flags;

        /* Encode the padded vertex count */

        if (info->instance_count > 1) {
                *padded_count = panfrost_padded_vertex_count(*vertex_count);

                unsigned shift = __builtin_ctz(ctx->padded_count);
                unsigned k = ctx->padded_count >> (shift + 1);

                tiler_postfix->instance_shift = vertex_postfix->instance_shift = shift;
                tiler_postfix->instance_odd = vertex_postfix->instance_odd = k;
        } else {
                *padded_count = *vertex_count;

                /* Reset instancing state */
                tiler_postfix->instance_shift = vertex_postfix->instance_shift = 0;
                tiler_postfix->instance_odd = vertex_postfix->instance_odd = 0;
        }
}
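
/* Worked example (illustrative, not from the original file): the padded count
 * is encoded as (2 * instance_odd + 1) << instance_shift. If *vertex_count
 * is padded up to 48 = 3 << 4, then shift = ctz(48) = 4 and
 * k = 48 >> 5 = 1, so instance_odd = 1 encodes the odd factor 2*1 + 1 = 3.
 * (The in-tree caller passes &ctx->padded_count for padded_count, so
 * ctx->padded_count above aliases *padded_count.) */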

static void
panfrost_shader_meta_init(struct panfrost_context *ctx,
                          enum pipe_shader_type st,
                          struct mali_shader_meta *meta)
{
        const struct panfrost_device *dev = pan_device(ctx->base.screen);
        struct panfrost_shader_state *ss = panfrost_get_shader_state(ctx, st);

        memset(meta, 0, sizeof(*meta));
        meta->shader = (ss->bo ? ss->bo->gpu : 0) | ss->first_tag;
        meta->attribute_count = ss->attribute_count;
        meta->varying_count = ss->varying_count;
        meta->texture_count = ctx->sampler_view_count[st];
        meta->sampler_count = ctx->sampler_count[st];

        if (dev->quirks & IS_BIFROST) {
                if (st == PIPE_SHADER_VERTEX)
                        meta->bifrost1.unk1 = 0x800000;
                else {
                        /* First clause ATEST |= 0x4000000.
                         * Less than 32 regs |= 0x200 */
                        meta->bifrost1.unk1 = 0x950020;
                }

                meta->bifrost1.uniform_buffer_count = panfrost_ubo_count(ctx, st);
                if (st == PIPE_SHADER_VERTEX)
                        meta->bifrost2.preload_regs = 0xC0;
                else {
                        meta->bifrost2.preload_regs = 0x1;
                        SET_BIT(meta->bifrost2.preload_regs, 0x10, ss->reads_frag_coord);
                }

                meta->bifrost2.uniform_count = MIN2(ss->uniform_count,
                                                    ss->uniform_cutoff);
        } else {
                meta->midgard1.uniform_count = MIN2(ss->uniform_count,
                                                    ss->uniform_cutoff);
                meta->midgard1.work_count = ss->work_reg_count;

                /* TODO: This is not conformant on ES3 */
                meta->midgard1.flags_hi = MALI_SUPPRESS_INF_NAN;

                meta->midgard1.flags_lo = 0x20;
                meta->midgard1.uniform_buffer_count = panfrost_ubo_count(ctx, st);

                SET_BIT(meta->midgard1.flags_hi, MALI_WRITES_GLOBAL, ss->writes_global);
        }
}

static unsigned
translate_tex_wrap(enum pipe_tex_wrap w)
{
        switch (w) {
        case PIPE_TEX_WRAP_REPEAT: return MALI_WRAP_MODE_REPEAT;
        case PIPE_TEX_WRAP_CLAMP: return MALI_WRAP_MODE_CLAMP;
        case PIPE_TEX_WRAP_CLAMP_TO_EDGE: return MALI_WRAP_MODE_CLAMP_TO_EDGE;
        case PIPE_TEX_WRAP_CLAMP_TO_BORDER: return MALI_WRAP_MODE_CLAMP_TO_BORDER;
        case PIPE_TEX_WRAP_MIRROR_REPEAT: return MALI_WRAP_MODE_MIRRORED_REPEAT;
        case PIPE_TEX_WRAP_MIRROR_CLAMP: return MALI_WRAP_MODE_MIRRORED_CLAMP;
        case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_EDGE: return MALI_WRAP_MODE_MIRRORED_CLAMP_TO_EDGE;
        case PIPE_TEX_WRAP_MIRROR_CLAMP_TO_BORDER: return MALI_WRAP_MODE_MIRRORED_CLAMP_TO_BORDER;
        default: unreachable("Invalid wrap");
        }
}

/* The hardware compares in the wrong order, so we have to flip before
 * encoding. Yes, really. */

static enum mali_func
panfrost_sampler_compare_func(const struct pipe_sampler_state *cso)
{
        if (!cso->compare_mode)
                return MALI_FUNC_NEVER;

        enum mali_func f = panfrost_translate_compare_func(cso->compare_func);
        return panfrost_flip_compare_func(f);
}
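
/* For example (illustrative, not from the original file): flipping the
 * operand order turns LESS into its mirror GREATER, since "ref < texel" and
 * "texel > ref" are the same test; symmetric functions such as EQUAL,
 * NEVER, and ALWAYS are unchanged by the flip. */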

static enum mali_mipmap_mode
pan_pipe_to_mipmode(enum pipe_tex_mipfilter f)
{
        switch (f) {
        case PIPE_TEX_MIPFILTER_NEAREST: return MALI_MIPMAP_MODE_NEAREST;
        case PIPE_TEX_MIPFILTER_LINEAR: return MALI_MIPMAP_MODE_TRILINEAR;
        case PIPE_TEX_MIPFILTER_NONE: return MALI_MIPMAP_MODE_NONE;
        default: unreachable("Invalid");
        }
}

void panfrost_sampler_desc_init(const struct pipe_sampler_state *cso,
                                struct mali_midgard_sampler_packed *hw)
{
        pan_pack(hw, MIDGARD_SAMPLER, cfg) {
                cfg.magnify_nearest = cso->mag_img_filter == PIPE_TEX_FILTER_NEAREST;
                cfg.minify_nearest = cso->min_img_filter == PIPE_TEX_FILTER_NEAREST;
                cfg.mipmap_mode = (cso->min_mip_filter == PIPE_TEX_MIPFILTER_LINEAR) ?
                                  MALI_MIPMAP_MODE_TRILINEAR : MALI_MIPMAP_MODE_NEAREST;
                cfg.normalized_coordinates = cso->normalized_coords;

                cfg.lod_bias = FIXED_16(cso->lod_bias, true);

                cfg.minimum_lod = FIXED_16(cso->min_lod, false);

                /* If necessary, we disable mipmapping in the sampler descriptor by
                 * clamping the LOD as tightly as possible (from 0 to epsilon,
                 * essentially -- remember these are fixed point numbers, so
                 * epsilon=1/256) */

                cfg.maximum_lod = (cso->min_mip_filter == PIPE_TEX_MIPFILTER_NONE) ?
                                  cfg.minimum_lod + 1 :
                                  FIXED_16(cso->max_lod, false);

                cfg.wrap_mode_s = translate_tex_wrap(cso->wrap_s);
                cfg.wrap_mode_t = translate_tex_wrap(cso->wrap_t);
                cfg.wrap_mode_r = translate_tex_wrap(cso->wrap_r);

                cfg.compare_function = panfrost_sampler_compare_func(cso);
                cfg.seamless_cube_map = cso->seamless_cube_map;

                cfg.border_color_r = cso->border_color.f[0];
                cfg.border_color_g = cso->border_color.f[1];
                cfg.border_color_b = cso->border_color.f[2];
                cfg.border_color_a = cso->border_color.f[3];
        }
}
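
/* Worked example (illustrative; assumes FIXED_16 is an 8.8 fixed-point
 * encoding, i.e. value * 256, consistent with the epsilon = 1/256 note
 * above): min_lod = 2.5 encodes as 640, and with mipmapping disabled
 * maximum_lod becomes 640 + 1 = 641, i.e. 2.5 + 1/256 -- a clamp window of
 * exactly one fixed-point ulp. */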

void panfrost_sampler_desc_init_bifrost(const struct pipe_sampler_state *cso,
                                        struct mali_bifrost_sampler_packed *hw)
{
        pan_pack(hw, BIFROST_SAMPLER, cfg) {
                cfg.magnify_linear = cso->mag_img_filter == PIPE_TEX_FILTER_LINEAR;
                cfg.minify_linear = cso->min_img_filter == PIPE_TEX_FILTER_LINEAR;
                cfg.mipmap_mode = pan_pipe_to_mipmode(cso->min_mip_filter);
                cfg.normalized_coordinates = cso->normalized_coords;

                cfg.lod_bias = FIXED_16(cso->lod_bias, true);
                cfg.minimum_lod = FIXED_16(cso->min_lod, false);
                cfg.maximum_lod = FIXED_16(cso->max_lod, false);

                cfg.wrap_mode_s = translate_tex_wrap(cso->wrap_s);
                cfg.wrap_mode_t = translate_tex_wrap(cso->wrap_t);
                cfg.wrap_mode_r = translate_tex_wrap(cso->wrap_r);

                cfg.compare_function = panfrost_sampler_compare_func(cso);
                cfg.seamless_cube_map = cso->seamless_cube_map;
        }
}

static void
panfrost_frag_meta_rasterizer_update(struct panfrost_context *ctx,
                                     struct mali_shader_meta *fragmeta)
{
        if (!ctx->rasterizer) {
                SET_BIT(fragmeta->unknown2_4, MALI_NO_MSAA, true);
                SET_BIT(fragmeta->unknown2_3, MALI_HAS_MSAA, false);
                fragmeta->depth_units = 0.0f;
                fragmeta->depth_factor = 0.0f;
                SET_BIT(fragmeta->unknown2_4, MALI_DEPTH_RANGE_A, false);
                SET_BIT(fragmeta->unknown2_4, MALI_DEPTH_RANGE_B, false);
                SET_BIT(fragmeta->unknown2_3, MALI_DEPTH_CLIP_NEAR, true);
                SET_BIT(fragmeta->unknown2_3, MALI_DEPTH_CLIP_FAR, true);
                return;
        }

        struct pipe_rasterizer_state *rast = &ctx->rasterizer->base;

        bool msaa = rast->multisample;

        /* TODO: Sample size */
        SET_BIT(fragmeta->unknown2_3, MALI_HAS_MSAA, msaa);
        SET_BIT(fragmeta->unknown2_4, MALI_NO_MSAA, !msaa);

        struct panfrost_shader_state *fs;
        fs = panfrost_get_shader_state(ctx, PIPE_SHADER_FRAGMENT);

        /* EXT_shader_framebuffer_fetch requires the shader to be run
         * per-sample when outputs are read. */
        bool per_sample = ctx->min_samples > 1 || fs->outputs_read;
        SET_BIT(fragmeta->unknown2_3, MALI_PER_SAMPLE, msaa && per_sample);

        fragmeta->depth_units = rast->offset_units * 2.0f;
        fragmeta->depth_factor = rast->offset_scale;

        /* XXX: Which bit is which? Does this maybe allow offsetting not-tri? */

        SET_BIT(fragmeta->unknown2_4, MALI_DEPTH_RANGE_A, rast->offset_tri);
        SET_BIT(fragmeta->unknown2_4, MALI_DEPTH_RANGE_B, rast->offset_tri);

        SET_BIT(fragmeta->unknown2_3, MALI_DEPTH_CLIP_NEAR, rast->depth_clip_near);
        SET_BIT(fragmeta->unknown2_3, MALI_DEPTH_CLIP_FAR, rast->depth_clip_far);
}

static void
panfrost_frag_meta_zsa_update(struct panfrost_context *ctx,
                              struct mali_shader_meta *fragmeta)
{
        const struct panfrost_zsa_state *so = ctx->depth_stencil;
        int zfunc = PIPE_FUNC_ALWAYS;

        if (!so) {
                /* If stenciling is disabled, the state is irrelevant */
                SET_BIT(fragmeta->unknown2_4, MALI_STENCIL_TEST, false);
                SET_BIT(fragmeta->unknown2_3, MALI_DEPTH_WRITEMASK, false);
        } else {
                SET_BIT(fragmeta->unknown2_4, MALI_STENCIL_TEST,
                        so->base.stencil[0].enabled);

                fragmeta->stencil_mask_front = so->stencil_mask_front;
                fragmeta->stencil_mask_back = so->stencil_mask_back;

                /* Bottom bits for stencil ref, exactly one word */
                fragmeta->stencil_front.opaque[0] = so->stencil_front.opaque[0] | ctx->stencil_ref.ref_value[0];

                /* If back-stencil is not enabled, use the front values */

                if (so->base.stencil[1].enabled)
                        fragmeta->stencil_back.opaque[0] = so->stencil_back.opaque[0] | ctx->stencil_ref.ref_value[1];
                else
                        fragmeta->stencil_back = fragmeta->stencil_front;

                if (so->base.depth.enabled)
                        zfunc = so->base.depth.func;

                /* Depth state (TODO: Refactor) */

                SET_BIT(fragmeta->unknown2_3, MALI_DEPTH_WRITEMASK,
                        so->base.depth.writemask);
        }

        fragmeta->unknown2_3 &= ~MALI_DEPTH_FUNC_MASK;
        fragmeta->unknown2_3 |= MALI_DEPTH_FUNC(panfrost_translate_compare_func(zfunc));
}

static bool
panfrost_fs_required(
                struct panfrost_shader_state *fs,
                struct panfrost_blend_final *blend,
                unsigned rt_count)
{
        /* If we generally have side effects */
        if (fs->fs_sidefx)
                return true;

        /* If colour is written we need to execute */
        for (unsigned i = 0; i < rt_count; ++i) {
                if (!blend[i].no_colour)
                        return true;
        }

        /* If depth is written and not implied we need to execute.
         * TODO: Predicate on Z/S writes being enabled */
        return (fs->writes_depth || fs->writes_stencil);
}

static void
panfrost_frag_meta_blend_update(struct panfrost_context *ctx,
                                struct mali_shader_meta *fragmeta,
                                void *rts)
{
        struct panfrost_batch *batch = panfrost_get_batch_for_fbo(ctx);
        const struct panfrost_device *dev = pan_device(ctx->base.screen);
        struct panfrost_shader_state *fs;
        fs = panfrost_get_shader_state(ctx, PIPE_SHADER_FRAGMENT);

        SET_BIT(fragmeta->unknown2_4, MALI_NO_DITHER,
                (dev->quirks & MIDGARD_SFBD) && ctx->blend &&
                !ctx->blend->base.dither);

        SET_BIT(fragmeta->unknown2_4, MALI_ALPHA_TO_COVERAGE,
                ctx->blend->base.alpha_to_coverage);

        /* Get blending setup */
        unsigned rt_count = MAX2(ctx->pipe_framebuffer.nr_cbufs, 1);

        struct panfrost_blend_final blend[PIPE_MAX_COLOR_BUFS];
        unsigned shader_offset = 0;
        struct panfrost_bo *shader_bo = NULL;

        for (unsigned c = 0; c < rt_count; ++c)
                blend[c] = panfrost_get_blend_for_context(ctx, c, &shader_bo,
                                                          &shader_offset);

        /* Disable shader execution if we can */
        if (dev->quirks & MIDGARD_SHADERLESS
            && !panfrost_fs_required(fs, blend, rt_count)) {
                fragmeta->shader = 0;
                fragmeta->attribute_count = 0;
                fragmeta->varying_count = 0;
                fragmeta->texture_count = 0;
                fragmeta->sampler_count = 0;

                /* This feature is not known to work on Bifrost */
                fragmeta->midgard1.work_count = 1;
                fragmeta->midgard1.uniform_count = 0;
                fragmeta->midgard1.uniform_buffer_count = 0;
        }

        /* If there is a blend shader, work registers are shared. We impose 8
         * work registers as a limit for blend shaders. Should be lower XXX */

        if (!(dev->quirks & IS_BIFROST)) {
                for (unsigned c = 0; c < rt_count; ++c) {
                        if (blend[c].is_shader) {
                                fragmeta->midgard1.work_count =
                                        MAX2(fragmeta->midgard1.work_count, 8);
                        }
                }
        }

        /* Even on MFBD, the shader descriptor gets blend shaders. It's *also*
         * copied to the blend_meta appended (by convention), but this is the
         * field actually read by the hardware. (Or maybe both are read...?).
         * Specify the last RTi with a blend shader. */

        fragmeta->blend.shader = 0;

        for (signed rt = (rt_count - 1); rt >= 0; --rt) {
                if (!blend[rt].is_shader)
                        continue;

                fragmeta->blend.shader = blend[rt].shader.gpu |
                                         blend[rt].shader.first_tag;
                break;
        }

        if (dev->quirks & MIDGARD_SFBD) {
                /* When only a single render target platform is used, the blend
                 * information is inside the shader meta itself. We additionally
                 * need to signal CAN_DISCARD for nontrivial blend modes (so
                 * we're able to read back the destination buffer) */

                SET_BIT(fragmeta->unknown2_3, MALI_HAS_BLEND_SHADER,
                        blend[0].is_shader);

                if (!blend[0].is_shader) {
                        fragmeta->blend.equation = *blend[0].equation.equation;
                        fragmeta->blend.constant = blend[0].equation.constant;
                }

                SET_BIT(fragmeta->unknown2_3, MALI_CAN_DISCARD,
                        !blend[0].no_blending || fs->can_discard);

                batch->draws |= PIPE_CLEAR_COLOR0;
                return;
        }

        if (dev->quirks & IS_BIFROST) {
                bool no_blend = true;

                for (unsigned i = 0; i < rt_count; ++i)
                        no_blend &= (blend[i].no_blending | blend[i].no_colour);

                SET_BIT(fragmeta->bifrost1.unk1, MALI_BIFROST_EARLY_Z,
                        !fs->can_discard && !fs->writes_depth && no_blend);
        }

        /* Additional blend descriptor tacked on for jobs using MFBD */

        for (unsigned i = 0; i < rt_count; ++i) {
                unsigned flags = 0;

                if (ctx->pipe_framebuffer.nr_cbufs > i && !blend[i].no_colour) {
                        flags = 0x200;
                        batch->draws |= (PIPE_CLEAR_COLOR0 << i);

                        bool is_srgb = (ctx->pipe_framebuffer.nr_cbufs > i) &&
                                       (ctx->pipe_framebuffer.cbufs[i]) &&
                                       util_format_is_srgb(ctx->pipe_framebuffer.cbufs[i]->format);

                        SET_BIT(flags, MALI_BLEND_MRT_SHADER, blend[i].is_shader);
                        SET_BIT(flags, MALI_BLEND_LOAD_TIB, !blend[i].no_blending);
                        SET_BIT(flags, MALI_BLEND_SRGB, is_srgb);
                        SET_BIT(flags, MALI_BLEND_NO_DITHER, !ctx->blend->base.dither);
                }

                if (dev->quirks & IS_BIFROST) {
                        struct bifrost_blend_rt *brts = rts;

                        brts[i].flags = flags;

                        if (blend[i].is_shader) {
                                /* The blend shader's address needs to be at
                                 * the same top 32 bit as the fragment shader.
                                 * TODO: Ensure that's always the case.
                                 */
                                assert((blend[i].shader.gpu & (0xffffffffull << 32)) ==
                                       (fs->bo->gpu & (0xffffffffull << 32)));
                                brts[i].shader = blend[i].shader.gpu;
                                brts[i].unk2 = 0x0;
                        } else if (ctx->pipe_framebuffer.nr_cbufs > i) {
                                enum pipe_format format = ctx->pipe_framebuffer.cbufs[i]->format;
                                const struct util_format_description *format_desc;
                                format_desc = util_format_description(format);

                                brts[i].equation = *blend[i].equation.equation;

                                /* TODO: this is a bit more complicated */
                                brts[i].constant = blend[i].equation.constant;

                                brts[i].format = panfrost_format_to_bifrost_blend(format_desc);

                                /* 0x19 disables blending and forces REPLACE
                                 * mode (equivalent to rgb_mode = alpha_mode =
                                 * x122, colour mask = 0xF). 0x1a allows
                                 * blending. */
                                brts[i].unk2 = blend[i].no_blending ? 0x19 : 0x1a;

                                brts[i].shader_type = fs->blend_types[i];
                        } else {
                                /* Dummy attachment for depth-only */
                                brts[i].unk2 = 0x3;
                                brts[i].shader_type = fs->blend_types[i];
                        }
                } else {
                        struct midgard_blend_rt *mrts = rts;
                        mrts[i].flags = flags;

                        if (blend[i].is_shader) {
                                mrts[i].blend.shader = blend[i].shader.gpu | blend[i].shader.first_tag;
                        } else {
                                mrts[i].blend.equation = *blend[i].equation.equation;
                                mrts[i].blend.constant = blend[i].equation.constant;
                        }
                }
        }
}

static void
panfrost_frag_shader_meta_init(struct panfrost_context *ctx,
                               struct mali_shader_meta *fragmeta,
                               void *rts)
{
        const struct panfrost_device *dev = pan_device(ctx->base.screen);
        struct panfrost_shader_state *fs;

        fs = panfrost_get_shader_state(ctx, PIPE_SHADER_FRAGMENT);

        bool msaa = ctx->rasterizer && ctx->rasterizer->base.multisample;
        fragmeta->coverage_mask = msaa ? ctx->sample_mask : ~0;

        fragmeta->unknown2_3 = MALI_DEPTH_FUNC(MALI_FUNC_ALWAYS) | 0x10;
        fragmeta->unknown2_4 = 0x4e0;

        /* unknown2_4 has 0x10 bit set on T6XX and T720. We don't know why this
         * is required (independent of 32-bit/64-bit descriptors), or why it's
         * not used on later GPU revisions. Otherwise, all shader jobs fault on
         * these earlier chips (perhaps this is a chicken bit of some kind).
         * More investigation is needed. */

        SET_BIT(fragmeta->unknown2_4, 0x10, dev->quirks & MIDGARD_SFBD);

        if (dev->quirks & IS_BIFROST) {
                /* TODO */
        } else {
                /* Depending on whether it's legal in the given shader, we try
                 * to enable early-z testing. TODO: respect e-z force */

                SET_BIT(fragmeta->midgard1.flags_lo, MALI_EARLY_Z,
                        !fs->can_discard && !fs->writes_global &&
                        !fs->writes_depth && !fs->writes_stencil &&
                        !ctx->blend->base.alpha_to_coverage);

                /* Add the writes Z/S flags if needed. */
                SET_BIT(fragmeta->midgard1.flags_lo, MALI_WRITES_Z, fs->writes_depth);
                SET_BIT(fragmeta->midgard1.flags_hi, MALI_WRITES_S, fs->writes_stencil);

                /* Any time texturing is used, derivatives are implicitly calculated,
                 * so we need to enable helper invocations */

                SET_BIT(fragmeta->midgard1.flags_lo, MALI_HELPER_INVOCATIONS,
                        fs->helper_invocations);

                /* If discard is enabled, which bit we set to convey this
                 * depends on if depth/stencil is used for the draw or not.
                 * Just one of depth OR stencil is enough to trigger this. */

                const struct pipe_depth_stencil_alpha_state *zsa = &ctx->depth_stencil->base;
                bool zs_enabled = fs->writes_depth || fs->writes_stencil;

                if (zsa) {
                        zs_enabled |= (zsa->depth.enabled && zsa->depth.func != PIPE_FUNC_ALWAYS);
                        zs_enabled |= zsa->stencil[0].enabled;
                }

                SET_BIT(fragmeta->midgard1.flags_lo, MALI_READS_TILEBUFFER,
                        fs->outputs_read || (!zs_enabled && fs->can_discard));
                SET_BIT(fragmeta->midgard1.flags_lo, MALI_READS_ZS, zs_enabled && fs->can_discard);
        }

        panfrost_frag_meta_rasterizer_update(ctx, fragmeta);
        panfrost_frag_meta_zsa_update(ctx, fragmeta);
        panfrost_frag_meta_blend_update(ctx, fragmeta, rts);
}

void
panfrost_emit_shader_meta(struct panfrost_batch *batch,
                          enum pipe_shader_type st,
                          struct mali_vertex_tiler_postfix *postfix)
{
        struct panfrost_context *ctx = batch->ctx;
        struct panfrost_shader_state *ss = panfrost_get_shader_state(ctx, st);

        if (!ss) {
                postfix->shader = 0;
                return;
        }

        struct mali_shader_meta meta;

        panfrost_shader_meta_init(ctx, st, &meta);

        /* Add the shader BO to the batch. */
        panfrost_batch_add_bo(batch, ss->bo,
                              PAN_BO_ACCESS_PRIVATE |
                              PAN_BO_ACCESS_READ |
                              panfrost_bo_access_for_stage(st));

        mali_ptr shader_ptr;

        if (st == PIPE_SHADER_FRAGMENT) {
                struct panfrost_device *dev = pan_device(ctx->base.screen);
                unsigned rt_count = MAX2(ctx->pipe_framebuffer.nr_cbufs, 1);
                size_t desc_size = sizeof(meta);
                void *rts = NULL;
                struct panfrost_transfer xfer;
                unsigned rt_size;

                if (dev->quirks & MIDGARD_SFBD)
                        rt_size = 0;
                else if (dev->quirks & IS_BIFROST)
                        rt_size = sizeof(struct bifrost_blend_rt);
                else
                        rt_size = sizeof(struct midgard_blend_rt);

                desc_size += rt_size * rt_count;

                if (rt_size)
                        rts = rzalloc_size(ctx, rt_size * rt_count);

                panfrost_frag_shader_meta_init(ctx, &meta, rts);

                xfer = panfrost_pool_alloc(&batch->pool, desc_size);

                memcpy(xfer.cpu, &meta, sizeof(meta));
                memcpy(xfer.cpu + sizeof(meta), rts, rt_size * rt_count);

                if (rt_size)
                        ralloc_free(rts);

                shader_ptr = xfer.gpu;
        } else {
                shader_ptr = panfrost_pool_upload(&batch->pool, &meta,
                                                  sizeof(meta));
        }

        postfix->shader = shader_ptr;
}

void
panfrost_emit_viewport(struct panfrost_batch *batch,
                       struct mali_vertex_tiler_postfix *tiler_postfix)
{
        struct panfrost_context *ctx = batch->ctx;
        const struct pipe_viewport_state *vp = &ctx->pipe_viewport;
        const struct pipe_scissor_state *ss = &ctx->scissor;
        const struct pipe_rasterizer_state *rast = &ctx->rasterizer->base;
        const struct pipe_framebuffer_state *fb = &ctx->pipe_framebuffer;

        /* Derive min/max from translate/scale. Note since |x| >= 0 by
         * definition, we have that -|x| <= |x| hence translate - |scale| <=
         * translate + |scale|, so the ordering is correct here. */
        float vp_minx = (int) (vp->translate[0] - fabsf(vp->scale[0]));
        float vp_maxx = (int) (vp->translate[0] + fabsf(vp->scale[0]));
        float vp_miny = (int) (vp->translate[1] - fabsf(vp->scale[1]));
        float vp_maxy = (int) (vp->translate[1] + fabsf(vp->scale[1]));
        float minz = (vp->translate[2] - fabsf(vp->scale[2]));
        float maxz = (vp->translate[2] + fabsf(vp->scale[2]));
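
        /* Worked numeric example (illustrative, not from the original file):
         * an 800x600 viewport with origin (0, 0) has translate = (400, 300)
         * and scale = (400, -300); taking absolute values gives
         * vp_minx/vp_maxx = 0/800 and vp_miny/vp_maxy = 0/600, so the Y
         * flip's negative scale does not upset the min/max ordering. */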

        /* Scissor to the intersection of viewport and to the scissor, clamped
         * to the framebuffer */

        unsigned minx = MIN2(fb->width, vp_minx);
        unsigned maxx = MIN2(fb->width, vp_maxx);
        unsigned miny = MIN2(fb->height, vp_miny);
        unsigned maxy = MIN2(fb->height, vp_maxy);

        if (ss && rast && rast->scissor) {
                minx = MAX2(ss->minx, minx);
                miny = MAX2(ss->miny, miny);
                maxx = MIN2(ss->maxx, maxx);
                maxy = MIN2(ss->maxy, maxy);
        }

        struct panfrost_transfer T = panfrost_pool_alloc(&batch->pool, MALI_VIEWPORT_LENGTH);

        pan_pack(T.cpu, VIEWPORT, cfg) {
                cfg.scissor_minimum_x = minx;
                cfg.scissor_minimum_y = miny;
                cfg.scissor_maximum_x = maxx - 1;
                cfg.scissor_maximum_y = maxy - 1;

                cfg.minimum_z = rast->depth_clip_near ? minz : -INFINITY;
                cfg.maximum_z = rast->depth_clip_far ? maxz : INFINITY;
        }

        tiler_postfix->viewport = T.gpu;
        panfrost_batch_union_scissor(batch, minx, miny, maxx, maxy);
}

static mali_ptr
panfrost_map_constant_buffer_gpu(struct panfrost_batch *batch,
                                 enum pipe_shader_type st,
                                 struct panfrost_constant_buffer *buf,
                                 unsigned index)
{
        struct pipe_constant_buffer *cb = &buf->cb[index];
        struct panfrost_resource *rsrc = pan_resource(cb->buffer);

        if (rsrc) {
                panfrost_batch_add_bo(batch, rsrc->bo,
                                      PAN_BO_ACCESS_SHARED |
                                      PAN_BO_ACCESS_READ |
                                      panfrost_bo_access_for_stage(st));

                /* Alignment guaranteed by
                 * PIPE_CAP_CONSTANT_BUFFER_OFFSET_ALIGNMENT */
                return rsrc->bo->gpu + cb->buffer_offset;
        } else if (cb->user_buffer) {
                return panfrost_pool_upload(&batch->pool,
                                            cb->user_buffer +
                                            cb->buffer_offset,
                                            cb->buffer_size);
        } else {
                unreachable("No constant buffer");
        }
}

struct sysval_uniform {
        union {
                float f[4];
                int32_t i[4];
                uint32_t u[4];
                uint64_t du[2];
        };
};

static void
panfrost_upload_viewport_scale_sysval(struct panfrost_batch *batch,
                                      struct sysval_uniform *uniform)
{
        struct panfrost_context *ctx = batch->ctx;
        const struct pipe_viewport_state *vp = &ctx->pipe_viewport;

        uniform->f[0] = vp->scale[0];
        uniform->f[1] = vp->scale[1];
        uniform->f[2] = vp->scale[2];
}

static void
panfrost_upload_viewport_offset_sysval(struct panfrost_batch *batch,
                                       struct sysval_uniform *uniform)
{
        struct panfrost_context *ctx = batch->ctx;
        const struct pipe_viewport_state *vp = &ctx->pipe_viewport;

        uniform->f[0] = vp->translate[0];
        uniform->f[1] = vp->translate[1];
        uniform->f[2] = vp->translate[2];
}

static void panfrost_upload_txs_sysval(struct panfrost_batch *batch,
                                       enum pipe_shader_type st,
                                       unsigned int sysvalid,
                                       struct sysval_uniform *uniform)
{
        struct panfrost_context *ctx = batch->ctx;
        unsigned texidx = PAN_SYSVAL_ID_TO_TXS_TEX_IDX(sysvalid);
        unsigned dim = PAN_SYSVAL_ID_TO_TXS_DIM(sysvalid);
        bool is_array = PAN_SYSVAL_ID_TO_TXS_IS_ARRAY(sysvalid);
        struct pipe_sampler_view *tex = &ctx->sampler_views[st][texidx]->base;

        assert(dim);
        uniform->i[0] = u_minify(tex->texture->width0, tex->u.tex.first_level);

        if (dim > 1)
                uniform->i[1] = u_minify(tex->texture->height0,
                                         tex->u.tex.first_level);

        if (dim > 2)
                uniform->i[2] = u_minify(tex->texture->depth0,
                                         tex->u.tex.first_level);

        if (is_array)
                uniform->i[dim] = tex->texture->array_size;
}

static void
panfrost_upload_ssbo_sysval(struct panfrost_batch *batch,
                            enum pipe_shader_type st,
                            unsigned ssbo_id,
                            struct sysval_uniform *uniform)
{
        struct panfrost_context *ctx = batch->ctx;

        assert(ctx->ssbo_mask[st] & (1 << ssbo_id));
        struct pipe_shader_buffer sb = ctx->ssbo[st][ssbo_id];

        /* Compute address */
        struct panfrost_bo *bo = pan_resource(sb.buffer)->bo;

        panfrost_batch_add_bo(batch, bo,
                              PAN_BO_ACCESS_SHARED | PAN_BO_ACCESS_RW |
                              panfrost_bo_access_for_stage(st));

        /* Upload address and size as sysval */
        uniform->du[0] = bo->gpu + sb.buffer_offset;
        uniform->u[2] = sb.buffer_size;
}

static void
panfrost_upload_sampler_sysval(struct panfrost_batch *batch,
                               enum pipe_shader_type st,
                               unsigned samp_idx,
                               struct sysval_uniform *uniform)
{
        struct panfrost_context *ctx = batch->ctx;
        struct pipe_sampler_state *sampl = &ctx->samplers[st][samp_idx]->base;

        uniform->f[0] = sampl->min_lod;
        uniform->f[1] = sampl->max_lod;
        uniform->f[2] = sampl->lod_bias;

        /* Even without any errata, Midgard represents "no mipmapping" as
         * fixing the LOD with the clamps; keep behaviour consistent. c.f.
         * panfrost_create_sampler_state which also explains our choice of
         * epsilon value (again to keep behaviour consistent) */

        if (sampl->min_mip_filter == PIPE_TEX_MIPFILTER_NONE)
                uniform->f[1] = uniform->f[0] + (1.0/256.0);
}
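
/* For instance (illustrative, not from the original file): with mipmapping
 * off and min_lod = 0.0, the sysval clamp becomes [0.0, 0.00390625], the
 * float equivalent of the one-ulp fixed-point window written into the
 * sampler descriptor earlier in this file. */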

static void
panfrost_upload_num_work_groups_sysval(struct panfrost_batch *batch,
                                       struct sysval_uniform *uniform)
{
        struct panfrost_context *ctx = batch->ctx;

        uniform->u[0] = ctx->compute_grid->grid[0];
        uniform->u[1] = ctx->compute_grid->grid[1];
        uniform->u[2] = ctx->compute_grid->grid[2];
}

static void
panfrost_upload_sysvals(struct panfrost_batch *batch, void *buf,
                        struct panfrost_shader_state *ss,
                        enum pipe_shader_type st)
{
        struct sysval_uniform *uniforms = (void *)buf;

        for (unsigned i = 0; i < ss->sysval_count; ++i) {
                int sysval = ss->sysval[i];

                switch (PAN_SYSVAL_TYPE(sysval)) {
                case PAN_SYSVAL_VIEWPORT_SCALE:
                        panfrost_upload_viewport_scale_sysval(batch,
                                                              &uniforms[i]);
                        break;
                case PAN_SYSVAL_VIEWPORT_OFFSET:
                        panfrost_upload_viewport_offset_sysval(batch,
                                                               &uniforms[i]);
                        break;
                case PAN_SYSVAL_TEXTURE_SIZE:
                        panfrost_upload_txs_sysval(batch, st,
                                                   PAN_SYSVAL_ID(sysval),
                                                   &uniforms[i]);
                        break;
                case PAN_SYSVAL_SSBO:
                        panfrost_upload_ssbo_sysval(batch, st,
                                                    PAN_SYSVAL_ID(sysval),
                                                    &uniforms[i]);
                        break;
                case PAN_SYSVAL_NUM_WORK_GROUPS:
                        panfrost_upload_num_work_groups_sysval(batch,
                                                               &uniforms[i]);
                        break;
                case PAN_SYSVAL_SAMPLER:
                        panfrost_upload_sampler_sysval(batch, st,
                                                       PAN_SYSVAL_ID(sysval),
                                                       &uniforms[i]);
                        break;
                default:
                        assert(0);
                }
        }
}

static const void *
panfrost_map_constant_buffer_cpu(struct panfrost_constant_buffer *buf,
                                 unsigned index)
{
        struct pipe_constant_buffer *cb = &buf->cb[index];
        struct panfrost_resource *rsrc = pan_resource(cb->buffer);

        if (rsrc)
                return rsrc->bo->cpu;
        else if (cb->user_buffer)
                return cb->user_buffer;
        else
                unreachable("No constant buffer");
}

void
panfrost_emit_const_buf(struct panfrost_batch *batch,
                        enum pipe_shader_type stage,
                        struct mali_vertex_tiler_postfix *postfix)
{
        struct panfrost_context *ctx = batch->ctx;
        struct panfrost_shader_variants *all = ctx->shader[stage];

        if (!all)
                return;

        struct panfrost_constant_buffer *buf = &ctx->constant_buffer[stage];

        struct panfrost_shader_state *ss = &all->variants[all->active_variant];

        /* Uniforms are implicitly UBO #0 */
        bool has_uniforms = buf->enabled_mask & (1 << 0);

        /* Allocate room for the sysval and the uniforms */
        size_t sys_size = sizeof(float) * 4 * ss->sysval_count;
        size_t uniform_size = has_uniforms ? (buf->cb[0].buffer_size) : 0;
        size_t size = sys_size + uniform_size;
        struct panfrost_transfer transfer = panfrost_pool_alloc(&batch->pool,
                                                                size);

        /* Upload sysvals requested by the shader */
        panfrost_upload_sysvals(batch, transfer.cpu, ss, stage);

        /* Upload uniforms */
        if (has_uniforms && uniform_size) {
                const void *cpu = panfrost_map_constant_buffer_cpu(buf, 0);
                memcpy(transfer.cpu + sys_size, cpu, uniform_size);
        }

        /* Next up, attach UBOs. UBO #0 is the uniforms we just
         * uploaded */

        unsigned ubo_count = panfrost_ubo_count(ctx, stage);
        assert(ubo_count >= 1);

        size_t sz = MALI_UNIFORM_BUFFER_LENGTH * ubo_count;
        struct panfrost_transfer ubos = panfrost_pool_alloc(&batch->pool, sz);
        uint64_t *ubo_ptr = (uint64_t *) ubos.cpu;

        /* Upload uniforms as a UBO */

        if (ss->uniform_count) {
                pan_pack(ubo_ptr, UNIFORM_BUFFER, cfg) {
                        cfg.entries = ss->uniform_count;
                        cfg.pointer = transfer.gpu;
                }
        } else {
                *ubo_ptr = 0;
        }

        /* The rest are honest-to-goodness UBOs */

        for (unsigned ubo = 1; ubo < ubo_count; ++ubo) {
                size_t usz = buf->cb[ubo].buffer_size;
                bool enabled = buf->enabled_mask & (1 << ubo);
                bool empty = usz == 0;

                if (!enabled || empty) {
                        ubo_ptr[ubo] = 0;
                        continue;
                }

                pan_pack(ubo_ptr + ubo, UNIFORM_BUFFER, cfg) {
                        cfg.entries = DIV_ROUND_UP(usz, 16);
                        cfg.pointer = panfrost_map_constant_buffer_gpu(batch,
                                        stage, buf, ubo);
                }
        }

        postfix->uniforms = transfer.gpu;
        postfix->uniform_buffers = ubos.gpu;

        buf->dirty_mask = 0;
}
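
/* Memory layout sketch (illustrative, not from the original file): with two
 * sysvals (16 bytes each) and a 64-byte UBO #0, the combined upload is
 *
 *     [ sysval 0 | sysval 1 | 64 bytes of uniforms ]
 *       byte 0     byte 16    bytes 32..95
 *
 * postfix->uniforms points at byte 0, so the user-visible uniforms begin
 * sys_size = 32 bytes in, matching the memcpy offset above. */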

void
panfrost_emit_shared_memory(struct panfrost_batch *batch,
                            const struct pipe_grid_info *info,
                            struct midgard_payload_vertex_tiler *vtp)
{
        struct panfrost_context *ctx = batch->ctx;
        struct panfrost_shader_variants *all = ctx->shader[PIPE_SHADER_COMPUTE];
        struct panfrost_shader_state *ss = &all->variants[all->active_variant];
        unsigned single_size = util_next_power_of_two(MAX2(ss->shared_size,
                                                           128));
        unsigned shared_size = single_size * info->grid[0] * info->grid[1] *
                               info->grid[2] * 4;
        struct panfrost_bo *bo = panfrost_batch_get_shared_memory(batch,
                                                                  shared_size,
                                                                  1);

        struct mali_shared_memory shared = {
                .shared_memory = bo->gpu,
                .shared_workgroup_count =
                        util_logbase2_ceil(info->grid[0]) +
                        util_logbase2_ceil(info->grid[1]) +
                        util_logbase2_ceil(info->grid[2]),
                .shared_unk1 = 0x2,
                .shared_shift = util_logbase2(single_size) - 1
        };

        vtp->postfix.shared_memory = panfrost_pool_upload(&batch->pool, &shared,
                                                          sizeof(shared));
}
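
/* Worked example (illustrative, not from the original file): a dispatch with
 * grid = (4, 2, 1) and ss->shared_size = 200 rounds each workgroup's slice up
 * to single_size = 256, allocates 256 * 4 * 2 * 1 * 4 = 8192 bytes, and
 * encodes shared_workgroup_count = log2(4) + log2(2) + log2(1) = 3, i.e. the
 * total workgroup count expressed as a sum of per-axis ceil-logs. */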
Boris Brezillon8e0a08b2020-03-05 18:43:13 +01001223
1224static mali_ptr
1225panfrost_get_tex_desc(struct panfrost_batch *batch,
1226 enum pipe_shader_type st,
1227 struct panfrost_sampler_view *view)
1228{
1229 if (!view)
1230 return (mali_ptr) 0;
1231
1232 struct pipe_sampler_view *pview = &view->base;
1233 struct panfrost_resource *rsrc = pan_resource(pview->texture);
1234
1235 /* Add the BO to the job so it's retained until the job is done. */
1236
1237 panfrost_batch_add_bo(batch, rsrc->bo,
1238 PAN_BO_ACCESS_SHARED | PAN_BO_ACCESS_READ |
1239 panfrost_bo_access_for_stage(st));
1240
Alyssa Rosenzweig32b171d2020-06-15 09:20:39 -04001241 panfrost_batch_add_bo(batch, view->bo,
Boris Brezillon8e0a08b2020-03-05 18:43:13 +01001242 PAN_BO_ACCESS_SHARED | PAN_BO_ACCESS_READ |
1243 panfrost_bo_access_for_stage(st));
1244
Alyssa Rosenzweig32b171d2020-06-15 09:20:39 -04001245 return view->bo->gpu;
Boris Brezillon8e0a08b2020-03-05 18:43:13 +01001246}
1247
Icecream95fafc3052020-06-12 20:14:02 +12001248static void
1249panfrost_update_sampler_view(struct panfrost_sampler_view *view,
1250 struct pipe_context *pctx)
1251{
1252 struct panfrost_resource *rsrc = pan_resource(view->base.texture);
Icecream9565b3b082020-06-20 19:09:03 +12001253 if (view->texture_bo != rsrc->bo->gpu ||
Alyssa Rosenzweig965537df2020-07-22 10:23:50 -04001254 view->modifier != rsrc->modifier) {
Alyssa Rosenzweig32b171d2020-06-15 09:20:39 -04001255 panfrost_bo_unreference(view->bo);
Icecream95fafc3052020-06-12 20:14:02 +12001256 panfrost_create_sampler_view_bo(view, pctx, &rsrc->base);
1257 }
1258}
1259
void
panfrost_emit_texture_descriptors(struct panfrost_batch *batch,
                                  enum pipe_shader_type stage,
                                  struct mali_vertex_tiler_postfix *postfix)
{
        struct panfrost_context *ctx = batch->ctx;
        struct panfrost_device *device = pan_device(ctx->base.screen);

        if (!ctx->sampler_view_count[stage])
                return;

        if (device->quirks & IS_BIFROST) {
                struct panfrost_transfer T = panfrost_pool_alloc(&batch->pool,
                                MALI_BIFROST_TEXTURE_LENGTH *
                                ctx->sampler_view_count[stage]);

                struct mali_bifrost_texture_packed *out =
                        (struct mali_bifrost_texture_packed *) T.cpu;

                for (int i = 0; i < ctx->sampler_view_count[stage]; ++i) {
                        struct panfrost_sampler_view *view = ctx->sampler_views[stage][i];
                        struct pipe_sampler_view *pview = &view->base;
                        struct panfrost_resource *rsrc = pan_resource(pview->texture);

                        panfrost_update_sampler_view(view, &ctx->base);
                        out[i] = view->bifrost_descriptor;

                        /* Add the BOs to the job so they are retained until the job is done. */

                        panfrost_batch_add_bo(batch, rsrc->bo,
                                              PAN_BO_ACCESS_SHARED | PAN_BO_ACCESS_READ |
                                              panfrost_bo_access_for_stage(stage));

                        panfrost_batch_add_bo(batch, view->bo,
                                              PAN_BO_ACCESS_SHARED | PAN_BO_ACCESS_READ |
                                              panfrost_bo_access_for_stage(stage));
                }

                postfix->textures = T.gpu;
        } else {
                uint64_t trampolines[PIPE_MAX_SHADER_SAMPLER_VIEWS];

                for (int i = 0; i < ctx->sampler_view_count[stage]; ++i) {
                        struct panfrost_sampler_view *view = ctx->sampler_views[stage][i];

                        panfrost_update_sampler_view(view, &ctx->base);

                        trampolines[i] = panfrost_get_tex_desc(batch, stage, view);
                }

                postfix->textures = panfrost_pool_upload(&batch->pool,
                                                         trampolines,
                                                         sizeof(uint64_t) *
                                                         ctx->sampler_view_count[stage]);
        }
}

void
panfrost_emit_sampler_descriptors(struct panfrost_batch *batch,
                                  enum pipe_shader_type stage,
                                  struct mali_vertex_tiler_postfix *postfix)
{
        struct panfrost_context *ctx = batch->ctx;

        if (!ctx->sampler_count[stage])
                return;

        size_t desc_size = MALI_BIFROST_SAMPLER_LENGTH;
        assert(MALI_BIFROST_SAMPLER_LENGTH == MALI_MIDGARD_SAMPLER_LENGTH);

        size_t sz = desc_size * ctx->sampler_count[stage];
        struct panfrost_transfer T = panfrost_pool_alloc(&batch->pool, sz);
        struct mali_midgard_sampler_packed *out = (struct mali_midgard_sampler_packed *) T.cpu;

        for (unsigned i = 0; i < ctx->sampler_count[stage]; ++i)
                out[i] = ctx->samplers[stage][i]->hw;

        postfix->sampler_descriptor = T.gpu;
}

void
panfrost_emit_vertex_data(struct panfrost_batch *batch,
                          struct mali_vertex_tiler_postfix *vertex_postfix)
{
        struct panfrost_context *ctx = batch->ctx;
        struct panfrost_vertex_state *so = ctx->vertex;

        unsigned instance_shift = vertex_postfix->instance_shift;
        unsigned instance_odd = vertex_postfix->instance_odd;

        /* Worst case: everything is NPOT, so every attribute buffer needs a
         * continuation record, hence the factor of two */

        struct panfrost_transfer S = panfrost_pool_alloc(&batch->pool,
                        MALI_ATTRIBUTE_LENGTH * PIPE_MAX_ATTRIBS * 2);

        struct panfrost_transfer T = panfrost_pool_alloc(&batch->pool,
                        MALI_ATTRIBUTE_LENGTH * (PAN_INSTANCE_ID + 1));

        struct mali_attribute_buffer_packed *bufs =
                (struct mali_attribute_buffer_packed *) S.cpu;

        struct mali_attribute_packed *out =
                (struct mali_attribute_packed *) T.cpu;

        unsigned attrib_to_buffer[PIPE_MAX_ATTRIBS] = { 0 };
        unsigned k = 0;

        for (unsigned i = 0; i < so->num_elements; ++i) {
                /* We map buffers 1:1 with the attributes, which means
                 * duplicating some vertex buffers (harmless aside from some
                 * minor caching implications) */

                struct pipe_vertex_element *elem = &so->pipe[i];
                unsigned vbi = elem->vertex_buffer_index;
                attrib_to_buffer[i] = k;

                if (!(ctx->vb_mask & (1 << vbi)))
                        continue;

                struct pipe_vertex_buffer *buf = &ctx->vertex_buffers[vbi];
                struct panfrost_resource *rsrc;

                rsrc = pan_resource(buf->buffer.resource);
                if (!rsrc)
                        continue;

                /* Add a dependency of the batch on the vertex buffer */
                panfrost_batch_add_bo(batch, rsrc->bo,
                                      PAN_BO_ACCESS_SHARED |
                                      PAN_BO_ACCESS_READ |
                                      PAN_BO_ACCESS_VERTEX_TILER);

                /* Mask off the lower bits, see the offset fixup below */
                mali_ptr raw_addr = rsrc->bo->gpu + buf->buffer_offset;
                mali_ptr addr = raw_addr & ~63;

                /* Since we advanced the base pointer, we shrink the buffer
                 * size, but add back the offset we subtracted */
                unsigned size = rsrc->base.width0 + (raw_addr - addr)
                                - buf->buffer_offset;

                /* When there is a divisor, the hardware-level divisor is
                 * the product of the instance divisor and the padded count */
                unsigned divisor = elem->instance_divisor;
                unsigned hw_divisor = ctx->padded_count * divisor;
                unsigned stride = buf->stride;

                /* If there is a divisor but no instancing, every instance
                 * reads the same attribute, so force a zero stride */

                if (divisor && ctx->instance_count == 1)
                        stride = 0;

                if (!divisor || ctx->instance_count <= 1) {
                        pan_pack(bufs + k, ATTRIBUTE_BUFFER, cfg) {
                                if (ctx->instance_count > 1)
                                        cfg.type = MALI_ATTRIBUTE_TYPE_1D_MODULUS;

                                cfg.pointer = addr;
                                cfg.stride = stride;
                                cfg.size = size;
                                cfg.divisor_r = instance_shift;
                                cfg.divisor_p = instance_odd;
                        }
                } else if (util_is_power_of_two_or_zero(hw_divisor)) {
                        pan_pack(bufs + k, ATTRIBUTE_BUFFER, cfg) {
                                cfg.type = MALI_ATTRIBUTE_TYPE_1D_POT_DIVISOR;
                                cfg.pointer = addr;
                                cfg.stride = stride;
                                cfg.size = size;
                                cfg.divisor_r = __builtin_ctz(hw_divisor);
                        }
                } else {
                        unsigned shift = 0, extra_flags = 0;

                        unsigned magic_divisor =
                                panfrost_compute_magic_divisor(hw_divisor, &shift, &extra_flags);

                        pan_pack(bufs + k, ATTRIBUTE_BUFFER, cfg) {
                                cfg.type = MALI_ATTRIBUTE_TYPE_1D_NPOT_DIVISOR;
                                cfg.pointer = addr;
                                cfg.stride = stride;
                                cfg.size = size;

                                cfg.divisor_r = shift;
                                cfg.divisor_e = extra_flags;
                        }

                        pan_pack(bufs + k + 1, ATTRIBUTE_BUFFER_CONTINUATION_NPOT, cfg) {
                                cfg.divisor_numerator = magic_divisor;
                                cfg.divisor = divisor;
                        }

                        ++k;
                }

                ++k;
        }
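
        /* Illustrative note (hypothetical numbers, not from the original
         * source): with a padded_count of 32, an instance divisor of 4 gives
         * hw_divisor = 128, a power of two, so the POT path applies with
         * divisor_r = ctz(128) = 7. A divisor of 3 gives hw_divisor = 96,
         * which is NPOT, so the magic-divisor path emits an extra
         * continuation record, hence the second ++k above and the worst-case
         * factor of two in the allocation. */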

        /* Add special gl_VertexID/gl_InstanceID buffers */

        panfrost_vertex_id(ctx->padded_count, &bufs[k], ctx->instance_count > 1);

        pan_pack(out + PAN_VERTEX_ID, ATTRIBUTE, cfg) {
                cfg.buffer_index = k++;
                cfg.format = so->formats[PAN_VERTEX_ID];
        }

        panfrost_instance_id(ctx->padded_count, &bufs[k], ctx->instance_count > 1);

        pan_pack(out + PAN_INSTANCE_ID, ATTRIBUTE, cfg) {
                cfg.buffer_index = k++;
                cfg.format = so->formats[PAN_INSTANCE_ID];
        }

        /* Attribute addresses require 64-byte alignment, so let:
         *
         *      base' = base & ~63 = base - (base & 63)
         *      offset' = offset + (base & 63)
         *
         * Since base' + offset' = base + offset, these are equivalent
         * addressing modes and now base' is 64-byte aligned.
         */
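
        /* For example (hypothetical numbers): if base = 0x1007 and
         * offset = 4, then base & 63 = 7, so base' = 0x1000 and offset' = 11.
         * base' + offset' = 0x100B = base + offset, but the buffer pointer is
         * now 64-byte aligned as the hardware requires. */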

        unsigned start = vertex_postfix->offset_start;

        for (unsigned i = 0; i < so->num_elements; ++i) {
                unsigned vbi = so->pipe[i].vertex_buffer_index;
                struct pipe_vertex_buffer *buf = &ctx->vertex_buffers[vbi];

                /* Adjust by the masked-off bits of the offset. Make sure we
                 * read src_offset from so->hw (which is not GPU visible)
                 * rather than target (which is) due to caching effects */

                unsigned src_offset = so->pipe[i].src_offset;

                /* BOs are aligned to 4k, so they are guaranteed to be
                 * aligned to 64 */
                src_offset += (buf->buffer_offset & 63);

                /* Also, somewhat obscurely, per-instance data needs to be
                 * offset in response to a delayed start in an indexed draw */

                if (so->pipe[i].instance_divisor && ctx->instance_count > 1 && start)
                        src_offset -= buf->stride * start;

                pan_pack(out + i, ATTRIBUTE, cfg) {
                        cfg.buffer_index = attrib_to_buffer[i];
                        cfg.format = so->formats[i];
                        cfg.offset = src_offset;
                }
        }

        vertex_postfix->attributes = S.gpu;
        vertex_postfix->attribute_meta = T.gpu;
}

static mali_ptr
panfrost_emit_varyings(struct panfrost_batch *batch, union mali_attr *slot,
                       unsigned stride, unsigned count)
{
        /* Fill out the descriptor */
        slot->stride = stride;
        slot->size = stride * count;
        slot->shift = slot->extra_flags = 0;

        struct panfrost_transfer transfer = panfrost_pool_alloc(&batch->pool,
                                                                slot->size);

        slot->elements = transfer.gpu | MALI_ATTR_LINEAR;

        return transfer.gpu;
}

static unsigned
panfrost_streamout_offset(unsigned stride, unsigned offset,
                          struct pipe_stream_output_target *target)
{
        return (target->buffer_offset + (offset * stride * 4)) & 63;
}
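
/* Illustrative example (hypothetical values): stride is in dwords and offset
 * counts vertices, so with stride = 4, offset = 10 and buffer_offset = 100,
 * the byte position is 100 + 10 * 4 * 4 = 260, and the residue within a
 * 64-byte line is 260 & 63 = 4. That residue is folded into the attribute's
 * offset while the base pointer itself is rounded down for alignment. */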

static void
panfrost_emit_streamout(struct panfrost_batch *batch, union mali_attr *slot,
                        unsigned stride, unsigned offset, unsigned count,
                        struct pipe_stream_output_target *target)
{
        /* Fill out the descriptor */
        slot->stride = stride * 4;
        slot->shift = slot->extra_flags = 0;

        unsigned max_size = target->buffer_size;
        unsigned expected_size = slot->stride * count;

        /* Grab the BO and bind it to the batch */
        struct panfrost_bo *bo = pan_resource(target->buffer)->bo;

        /* Varyings are WRITE from the perspective of the VERTEX but READ from
         * the perspective of the TILER and FRAGMENT.
         */
        panfrost_batch_add_bo(batch, bo,
                              PAN_BO_ACCESS_SHARED |
                              PAN_BO_ACCESS_RW |
                              PAN_BO_ACCESS_VERTEX_TILER |
                              PAN_BO_ACCESS_FRAGMENT);

        /* We will have an offset applied to get alignment */
        mali_ptr addr = bo->gpu + target->buffer_offset + (offset * slot->stride);
        slot->elements = (addr & ~63) | MALI_ATTR_LINEAR;
        slot->size = MIN2(max_size, expected_size) + (addr & 63);
}

static bool
has_point_coord(unsigned mask, gl_varying_slot loc)
{
        if ((loc >= VARYING_SLOT_TEX0) && (loc <= VARYING_SLOT_TEX7))
                return (mask & (1 << (loc - VARYING_SLOT_TEX0)));
        else if (loc == VARYING_SLOT_PNTC)
                return (mask & (1 << 8));
        else
                return false;
}

/* Helpers for manipulating stream out information so we can pack varyings
 * accordingly. Compute the src_offset for a given captured varying */

static struct pipe_stream_output *
pan_get_so(struct pipe_stream_output_info *info, gl_varying_slot loc)
{
        for (unsigned i = 0; i < info->num_outputs; ++i) {
                if (info->output[i].register_index == loc)
                        return &info->output[i];
        }

        unreachable("Varying not captured");
}

static unsigned
pan_varying_size(enum mali_format fmt)
{
        unsigned type = MALI_EXTRACT_TYPE(fmt);
        unsigned chan = MALI_EXTRACT_CHANNELS(fmt);
        unsigned bits = MALI_EXTRACT_BITS(fmt);
        unsigned bpc = 0;

        if (bits == MALI_CHANNEL_FLOAT) {
                /* No doubles */
                bool fp16 = (type == MALI_FORMAT_SINT);
                assert(fp16 || (type == MALI_FORMAT_UNORM));

                bpc = fp16 ? 2 : 4;
        } else {
                assert(type >= MALI_FORMAT_SNORM && type <= MALI_FORMAT_SINT);

                /* See the enums */
                bits = 1 << bits;
                assert(bits >= 8);
                bpc = bits / 8;
        }

        return bpc * chan;
}
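
/* Worked example (illustrative): a four-channel fp32 varying decodes to
 * bpc = 4 and chan = 4, so 16 bytes per vertex, while an fp16 vec2 decodes
 * to bpc = 2 and chan = 2, so 4 bytes. These sizes drive the watermark
 * allocation used for linked general varyings below. */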

/* Indices for named (non-XFB) varyings that are present. These are packed
 * tightly so they correspond to a bitfield present (P) indexed by (1 <<
 * PAN_VARY_*). This has the nice property that you can look up the buffer
 * index of a given special field given a shift S by:
 *
 *      idx = popcount(P & ((1 << S) - 1))
 *
 * That is: count all of the varyings that come earlier; that count is the
 * new index. Likewise, the total number of special buffers required is
 * simply popcount(P).
 */
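
/* Worked example (illustrative): if only GENERAL, POSITION and PSIZ are
 * present, then P = 0b0111. The buffer index of PSIZ (shift S = 2) is
 * popcount(0b0111 & 0b0011) = 2, and pan_xfb_base(P) = popcount(0b0111) = 3,
 * so any XFB buffers start at index 3. */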

enum pan_special_varying {
        PAN_VARY_GENERAL = 0,
        PAN_VARY_POSITION = 1,
        PAN_VARY_PSIZ = 2,
        PAN_VARY_PNTCOORD = 3,
        PAN_VARY_FACE = 4,
        PAN_VARY_FRAGCOORD = 5,

        /* Keep last */
        PAN_VARY_MAX,
};

/* Given a varying, figure out which index it corresponds to */

static inline unsigned
pan_varying_index(unsigned present, enum pan_special_varying v)
{
        unsigned mask = (1 << v) - 1;
        return util_bitcount(present & mask);
}

/* Get the base offset for XFB buffers, which by convention come after
 * everything else. Wrapper function for semantic reasons; by construction
 * this is just popcount. */

static inline unsigned
pan_xfb_base(unsigned present)
{
        return util_bitcount(present);
}

/* Computes the present mask for varyings so we can start emitting varying records */

static inline unsigned
pan_varying_present(
        struct panfrost_shader_state *vs,
        struct panfrost_shader_state *fs,
        unsigned quirks)
{
        /* At the moment we always emit general and position buffers. Not
         * strictly necessary but usually harmless */

        unsigned present = (1 << PAN_VARY_GENERAL) | (1 << PAN_VARY_POSITION);

        /* Enable special buffers by the shader info */

        if (vs->writes_point_size)
                present |= (1 << PAN_VARY_PSIZ);

        if (fs->reads_point_coord)
                present |= (1 << PAN_VARY_PNTCOORD);

        if (fs->reads_face)
                present |= (1 << PAN_VARY_FACE);

        if (fs->reads_frag_coord && !(quirks & IS_BIFROST))
                present |= (1 << PAN_VARY_FRAGCOORD);

        /* Also, if we have a point sprite, we need a point coord buffer */

        for (unsigned i = 0; i < fs->varying_count; i++) {
                gl_varying_slot loc = fs->varyings_loc[i];

                if (has_point_coord(fs->point_sprite_mask, loc))
                        present |= (1 << PAN_VARY_PNTCOORD);
        }

        return present;
}
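
/* For instance (illustrative): a vertex shader writing gl_PointSize paired
 * with a fragment shader reading gl_PointCoord yields
 * present = GENERAL | POSITION | PSIZ | PNTCOORD = 0b1111, with the
 * corresponding buffers occupying indices 0 through 3 in that order. */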

/* Emitters for varying records */

static void
pan_emit_vary(struct mali_attribute_packed *out,
              unsigned present, enum pan_special_varying buf,
              unsigned quirks, enum mali_format format,
              unsigned offset)
{
        unsigned nr_channels = MALI_EXTRACT_CHANNELS(format);
        unsigned swizzle = quirks & HAS_SWIZZLES ?
                        panfrost_get_default_swizzle(nr_channels) :
                        panfrost_bifrost_swizzle(nr_channels);

        pan_pack(out, ATTRIBUTE, cfg) {
                cfg.buffer_index = pan_varying_index(present, buf);
                cfg.unknown = quirks & IS_BIFROST ? 0x0 : 0x1;
                cfg.format = (format << 12) | swizzle;
                cfg.offset = offset;
        }
}

/* General varying that is unused */

static void
pan_emit_vary_only(struct mali_attribute_packed *out,
                   unsigned present, unsigned quirks)
{
        pan_emit_vary(out, present, 0, quirks, MALI_VARYING_DISCARD, 0);
}

/* Special records */

static const enum mali_format pan_varying_formats[PAN_VARY_MAX] = {
        [PAN_VARY_POSITION] = MALI_VARYING_POS,
        [PAN_VARY_PSIZ] = MALI_R16F,
        [PAN_VARY_PNTCOORD] = MALI_R16F,
        [PAN_VARY_FACE] = MALI_R32I,
        [PAN_VARY_FRAGCOORD] = MALI_RGBA32F
};

static void
pan_emit_vary_special(struct mali_attribute_packed *out,
                      unsigned present, enum pan_special_varying buf,
                      unsigned quirks)
{
        assert(buf < PAN_VARY_MAX);
        pan_emit_vary(out, present, buf, quirks, pan_varying_formats[buf], 0);
}

static enum mali_format
pan_xfb_format(enum mali_format format, unsigned nr)
{
        if (MALI_EXTRACT_BITS(format) == MALI_CHANNEL_FLOAT)
                return MALI_R32F | MALI_NR_CHANNELS(nr);
        else
                return MALI_EXTRACT_TYPE(format) | MALI_NR_CHANNELS(nr) | MALI_CHANNEL_32;
}

/* Transform feedback records. Note struct pipe_stream_output is (if packed as
 * a bitfield) 32-bit, smaller than a 64-bit pointer, so may as well pass by
 * value. */

static void
pan_emit_vary_xfb(struct mali_attribute_packed *out,
                  unsigned present,
                  unsigned max_xfb,
                  unsigned *streamout_offsets,
                  unsigned quirks,
                  enum mali_format format,
                  struct pipe_stream_output o)
{
        unsigned swizzle = quirks & HAS_SWIZZLES ?
                        panfrost_get_default_swizzle(o.num_components) :
                        panfrost_bifrost_swizzle(o.num_components);

        pan_pack(out, ATTRIBUTE, cfg) {
                /* XFB buffers come after everything else */
                cfg.buffer_index = pan_xfb_base(present) + o.output_buffer;
                cfg.unknown = quirks & IS_BIFROST ? 0x0 : 0x1;

                /* Override number of channels and precision to highp */
                cfg.format = (pan_xfb_format(format, o.num_components) << 12) | swizzle;

                /* Apply given offsets together */
                cfg.offset = (o.dst_offset * 4) /* dwords */
                        + streamout_offsets[o.output_buffer];
        }
}

/* Determine if we should capture a varying for XFB. This requires actually
 * having a buffer for it. If we don't capture it, we'll fall back to a
 * general varying path (linked or unlinked, possibly discarding the write) */

static bool
panfrost_xfb_captured(struct panfrost_shader_state *xfb,
                      unsigned loc, unsigned max_xfb)
{
        if (!(xfb->so_mask & (1ll << loc)))
                return false;

        struct pipe_stream_output *o = pan_get_so(&xfb->stream_output, loc);
        return o->output_buffer < max_xfb;
}

static void
pan_emit_general_varying(struct mali_attribute_packed *out,
                         struct panfrost_shader_state *other,
                         struct panfrost_shader_state *xfb,
                         gl_varying_slot loc,
                         enum mali_format format,
                         unsigned present,
                         unsigned quirks,
                         unsigned *gen_offsets,
                         enum mali_format *gen_formats,
                         unsigned *gen_stride,
                         unsigned idx,
                         bool should_alloc)
{
        /* Check if we're linked */
        signed other_idx = -1;

        for (unsigned j = 0; j < other->varying_count; ++j) {
                if (other->varyings_loc[j] == loc) {
                        other_idx = j;
                        break;
                }
        }

        if (other_idx < 0) {
                pan_emit_vary_only(out, present, quirks);
                return;
        }

        unsigned offset = gen_offsets[other_idx];

        if (should_alloc) {
                /* We're linked, so allocate a space via a watermark allocation */
                enum mali_format alt = other->varyings[other_idx];

                /* Do interpolation at minimum precision */
                unsigned size_main = pan_varying_size(format);
                unsigned size_alt = pan_varying_size(alt);
                unsigned size = MIN2(size_main, size_alt);

                /* If a varying is marked for XFB but not actually captured, we
                 * should match the format to the format that would otherwise
                 * be used for XFB, since dEQP checks for invariance here. It's
                 * unclear if this is required by the spec. */

                if (xfb->so_mask & (1ull << loc)) {
                        struct pipe_stream_output *o = pan_get_so(&xfb->stream_output, loc);
                        format = pan_xfb_format(format, o->num_components);
                        size = pan_varying_size(format);
                } else if (size == size_alt) {
                        format = alt;
                }

                gen_offsets[idx] = *gen_stride;
                gen_formats[other_idx] = format;
                offset = *gen_stride;
                *gen_stride += size;
        }

        pan_emit_vary(out, present, PAN_VARY_GENERAL, quirks, format, offset);
}
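
/* Worked example (illustrative, hypothetical formats): if the VS and FS link
 * two general varyings, an fp32 vec4 and an fp16 vec2, the watermark
 * allocation above assigns offset 0 to the first (advancing gen_stride by 16)
 * and offset 16 to the second (advancing it by 4), so both stages agree on a
 * packed 20-byte-per-vertex general varying buffer. */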

/* Higher-level wrapper around all of the above, classifying a varying into one
 * of the above types */

static void
panfrost_emit_varying(
        struct mali_attribute_packed *out,
        struct panfrost_shader_state *stage,
        struct panfrost_shader_state *other,
        struct panfrost_shader_state *xfb,
        unsigned present,
        unsigned max_xfb,
        unsigned *streamout_offsets,
        unsigned quirks,
        unsigned *gen_offsets,
        enum mali_format *gen_formats,
        unsigned *gen_stride,
        unsigned idx,
        bool should_alloc,
        bool is_fragment)
{
        gl_varying_slot loc = stage->varyings_loc[idx];
        enum mali_format format = stage->varyings[idx];

        /* Override format to match linkage */
        if (!should_alloc && gen_formats[idx])
                format = gen_formats[idx];

        if (has_point_coord(stage->point_sprite_mask, loc)) {
                pan_emit_vary_special(out, present, PAN_VARY_PNTCOORD, quirks);
        } else if (panfrost_xfb_captured(xfb, loc, max_xfb)) {
                struct pipe_stream_output *o = pan_get_so(&xfb->stream_output, loc);
                pan_emit_vary_xfb(out, present, max_xfb, streamout_offsets, quirks, format, *o);
        } else if (loc == VARYING_SLOT_POS) {
                if (is_fragment)
                        pan_emit_vary_special(out, present, PAN_VARY_FRAGCOORD, quirks);
                else
                        pan_emit_vary_special(out, present, PAN_VARY_POSITION, quirks);
        } else if (loc == VARYING_SLOT_PSIZ) {
                pan_emit_vary_special(out, present, PAN_VARY_PSIZ, quirks);
        } else if (loc == VARYING_SLOT_PNTC) {
                pan_emit_vary_special(out, present, PAN_VARY_PNTCOORD, quirks);
        } else if (loc == VARYING_SLOT_FACE) {
                pan_emit_vary_special(out, present, PAN_VARY_FACE, quirks);
        } else {
                pan_emit_general_varying(out, other, xfb, loc, format, present,
                                         quirks, gen_offsets, gen_formats, gen_stride,
                                         idx, should_alloc);
        }
}

static void
pan_emit_special_input(union mali_attr *varyings,
                       unsigned present,
                       enum pan_special_varying v,
                       mali_ptr addr)
{
        if (present & (1 << v)) {
                /* Ensure we write exactly once for performance and with fields
                 * zeroed appropriately to avoid flakes */

                union mali_attr s = {
                        .elements = addr
                };

                varyings[pan_varying_index(present, v)] = s;
        }
}

void
panfrost_emit_varying_descriptor(struct panfrost_batch *batch,
                                 unsigned vertex_count,
                                 struct mali_vertex_tiler_postfix *vertex_postfix,
                                 struct mali_vertex_tiler_postfix *tiler_postfix,
                                 union midgard_primitive_size *primitive_size)
{
        /* Load the shaders */
        struct panfrost_context *ctx = batch->ctx;
        struct panfrost_device *dev = pan_device(ctx->base.screen);
        struct panfrost_shader_state *vs, *fs;
        size_t vs_size, fs_size;

        /* Allocate the varying descriptor */

        vs = panfrost_get_shader_state(ctx, PIPE_SHADER_VERTEX);
        fs = panfrost_get_shader_state(ctx, PIPE_SHADER_FRAGMENT);
        vs_size = sizeof(struct mali_attr_meta) * vs->varying_count;
        fs_size = sizeof(struct mali_attr_meta) * fs->varying_count;

        struct panfrost_transfer trans = panfrost_pool_alloc(&batch->pool,
                                                             vs_size +
                                                             fs_size);

        struct pipe_stream_output_info *so = &vs->stream_output;
        unsigned present = pan_varying_present(vs, fs, dev->quirks);

        /* Check if this varying is linked by us. This is the case for
         * general-purpose, non-captured varyings. If it is, link it. If it's
         * not, use the provided stream out information to determine the
         * offset, since it was already linked for us. */

        unsigned gen_offsets[32];
        enum mali_format gen_formats[32];
        memset(gen_offsets, 0, sizeof(gen_offsets));
        memset(gen_formats, 0, sizeof(gen_formats));

        unsigned gen_stride = 0;
        assert(vs->varying_count < ARRAY_SIZE(gen_offsets));
        assert(fs->varying_count < ARRAY_SIZE(gen_offsets));

        unsigned streamout_offsets[32];

        for (unsigned i = 0; i < ctx->streamout.num_targets; ++i) {
                streamout_offsets[i] = panfrost_streamout_offset(
                                        so->stride[i],
                                        ctx->streamout.offsets[i],
                                        ctx->streamout.targets[i]);
        }

        struct mali_attribute_packed *ovs = (struct mali_attribute_packed *) trans.cpu;
        struct mali_attribute_packed *ofs = ovs + vs->varying_count;

        for (unsigned i = 0; i < vs->varying_count; i++) {
                panfrost_emit_varying(ovs + i, vs, fs, vs, present,
                                      ctx->streamout.num_targets, streamout_offsets,
                                      dev->quirks,
                                      gen_offsets, gen_formats, &gen_stride, i, true, false);
        }

        for (unsigned i = 0; i < fs->varying_count; i++) {
                panfrost_emit_varying(ofs + i, fs, vs, vs, present,
                                      ctx->streamout.num_targets, streamout_offsets,
                                      dev->quirks,
                                      gen_offsets, gen_formats, &gen_stride, i, false, true);
        }

        unsigned xfb_base = pan_xfb_base(present);
        struct panfrost_transfer T = panfrost_pool_alloc(&batch->pool,
                        sizeof(union mali_attr) * (xfb_base + ctx->streamout.num_targets));
        union mali_attr *varyings = (union mali_attr *) T.cpu;

        /* Emit the stream out buffers */

        unsigned out_count = u_stream_outputs_for_vertices(ctx->active_prim,
                                                           ctx->vertex_count);

        for (unsigned i = 0; i < ctx->streamout.num_targets; ++i) {
                panfrost_emit_streamout(batch, &varyings[xfb_base + i],
                                        so->stride[i],
                                        ctx->streamout.offsets[i],
                                        out_count,
                                        ctx->streamout.targets[i]);
        }

        panfrost_emit_varyings(batch,
                               &varyings[pan_varying_index(present, PAN_VARY_GENERAL)],
                               gen_stride, vertex_count);

        /* fp32 vec4 gl_Position */
        tiler_postfix->position_varying = panfrost_emit_varyings(batch,
                        &varyings[pan_varying_index(present, PAN_VARY_POSITION)],
                        sizeof(float) * 4, vertex_count);

        if (present & (1 << PAN_VARY_PSIZ)) {
                primitive_size->pointer = panfrost_emit_varyings(batch,
                                &varyings[pan_varying_index(present, PAN_VARY_PSIZ)],
                                2, vertex_count);
        }

        pan_emit_special_input(varyings, present, PAN_VARY_PNTCOORD, MALI_VARYING_POINT_COORD);
        pan_emit_special_input(varyings, present, PAN_VARY_FACE, MALI_VARYING_FRONT_FACING);
        pan_emit_special_input(varyings, present, PAN_VARY_FRAGCOORD, MALI_VARYING_FRAG_COORD);

        vertex_postfix->varyings = T.gpu;
        tiler_postfix->varyings = T.gpu;

        vertex_postfix->varying_meta = trans.gpu;
        tiler_postfix->varying_meta = trans.gpu + vs_size;
}

void
panfrost_emit_vertex_tiler_jobs(struct panfrost_batch *batch,
                                struct mali_vertex_tiler_prefix *vertex_prefix,
                                struct mali_vertex_tiler_postfix *vertex_postfix,
                                struct mali_vertex_tiler_prefix *tiler_prefix,
                                struct mali_vertex_tiler_postfix *tiler_postfix,
                                union midgard_primitive_size *primitive_size)
{
        struct panfrost_context *ctx = batch->ctx;
        struct panfrost_device *device = pan_device(ctx->base.screen);
        bool wallpapering = ctx->wallpaper_batch && batch->scoreboard.tiler_dep;
        struct bifrost_payload_vertex bifrost_vertex = {0,};
        struct bifrost_payload_tiler bifrost_tiler = {0,};
        struct midgard_payload_vertex_tiler midgard_vertex = {0,};
        struct midgard_payload_vertex_tiler midgard_tiler = {0,};
        void *vp, *tp;
        size_t vp_size, tp_size;

        if (device->quirks & IS_BIFROST) {
                bifrost_vertex.prefix = *vertex_prefix;
                bifrost_vertex.postfix = *vertex_postfix;
                vp = &bifrost_vertex;
                vp_size = sizeof(bifrost_vertex);

                bifrost_tiler.prefix = *tiler_prefix;
                bifrost_tiler.tiler.primitive_size = *primitive_size;
                bifrost_tiler.tiler.tiler_meta = panfrost_batch_get_tiler_meta(batch, ~0);
                bifrost_tiler.postfix = *tiler_postfix;
                tp = &bifrost_tiler;
                tp_size = sizeof(bifrost_tiler);
        } else {
                midgard_vertex.prefix = *vertex_prefix;
                midgard_vertex.postfix = *vertex_postfix;
                vp = &midgard_vertex;
                vp_size = sizeof(midgard_vertex);

                midgard_tiler.prefix = *tiler_prefix;
                midgard_tiler.postfix = *tiler_postfix;
                midgard_tiler.primitive_size = *primitive_size;
                tp = &midgard_tiler;
                tp_size = sizeof(midgard_tiler);
        }

        if (wallpapering) {
                /* Inject in reverse order, with "predicted" job indices.
                 * THIS IS A HACK XXX */
                panfrost_new_job(&batch->pool, &batch->scoreboard, MALI_JOB_TYPE_TILER, false,
                                 batch->scoreboard.job_index + 2, tp, tp_size, true);
                panfrost_new_job(&batch->pool, &batch->scoreboard, MALI_JOB_TYPE_VERTEX, false, 0,
                                 vp, vp_size, true);
                return;
        }

        /* If rasterizer discard is enabled, only submit the vertex job */

        bool rasterizer_discard = ctx->rasterizer &&
                                  ctx->rasterizer->base.rasterizer_discard;

        unsigned vertex = panfrost_new_job(&batch->pool, &batch->scoreboard, MALI_JOB_TYPE_VERTEX, false, 0,
                                           vp, vp_size, false);

        if (rasterizer_discard)
                return;

        panfrost_new_job(&batch->pool, &batch->scoreboard, MALI_JOB_TYPE_TILER, false, vertex, tp, tp_size,
                         false);
}
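
/* Illustrative note on the resulting job graph: in the common (non-wallpaper)
 * case a draw submits a VERTEX job and then a TILER job whose dependency slot
 * holds the vertex job's index, so tiling never starts before shading
 * finishes. With rasterizer discard, only the VERTEX job is submitted, since
 * nothing will be rasterized. */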

/* TODO: stop hardcoding this */
mali_ptr
panfrost_emit_sample_locations(struct panfrost_batch *batch)
{
        uint16_t locations[] = {
                128, 128,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                0, 256,
                128, 128,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
                0, 0,
        };

        return panfrost_pool_upload(&batch->pool, locations, 96 * sizeof(uint16_t));
}