/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#include <drm/drmP.h>
#include "radeon.h"
#include "evergreend.h"
#include "evergreen_reg_safe.h"
#include "cayman_reg_safe.h"

#define MAX(a,b) (((a)>(b))?(a):(b))
#define MIN(a,b) (((a)<(b))?(a):(b))

#define REG_SAFE_BM_SIZE ARRAY_SIZE(evergreen_reg_safe_bm)

int r600_dma_cs_next_reloc(struct radeon_cs_parser *p,
			   struct radeon_bo_list **cs_reloc);
struct evergreen_cs_track {
	u32			group_size;
	u32			nbanks;
	u32			npipes;
	u32			row_size;
	/* value we track */
	u32			nsamples;		/* unused */
	struct radeon_bo	*cb_color_bo[12];
	u32			cb_color_bo_offset[12];
	struct radeon_bo	*cb_color_fmask_bo[8];	/* unused */
	struct radeon_bo	*cb_color_cmask_bo[8];	/* unused */
	u32			cb_color_info[12];
	u32			cb_color_view[12];
	u32			cb_color_pitch[12];
	u32			cb_color_slice[12];
	u32			cb_color_slice_idx[12];
	u32			cb_color_attrib[12];
	u32			cb_color_cmask_slice[8];	/* unused */
	u32			cb_color_fmask_slice[8];	/* unused */
	u32			cb_target_mask;
	u32			cb_shader_mask;			/* unused */
	u32			vgt_strmout_config;
	u32			vgt_strmout_buffer_config;
	struct radeon_bo	*vgt_strmout_bo[4];
	u32			vgt_strmout_bo_offset[4];
	u32			vgt_strmout_size[4];
	u32			db_depth_control;
	u32			db_depth_view;
	u32			db_depth_slice;
	u32			db_depth_size;
	u32			db_z_info;
	u32			db_z_read_offset;
	u32			db_z_write_offset;
	struct radeon_bo	*db_z_read_bo;
	struct radeon_bo	*db_z_write_bo;
	u32			db_s_info;
	u32			db_s_read_offset;
	u32			db_s_write_offset;
	struct radeon_bo	*db_s_read_bo;
	struct radeon_bo	*db_s_write_bo;
	bool			sx_misc_kill_all_prims;
	bool			cb_dirty;
	bool			db_dirty;
	bool			streamout_dirty;
	u32			htile_offset;
	u32			htile_surface;
	struct radeon_bo	*htile_bo;
	unsigned long		indirect_draw_buffer_size;
	const unsigned		*reg_safe_bm;
};

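/*
 * Everything in evergreen_cs_track is shadow state: the parser records
 * register writes here as it walks the command stream, so that
 * cross-register invariants (do the bound surfaces fit in their BOs,
 * are they aligned for the chosen tile mode, ...) can be validated as
 * a whole in evergreen_cs_track_check() rather than one register at a
 * time.
 */
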
static u32 evergreen_cs_get_aray_mode(u32 tiling_flags)
{
	if (tiling_flags & RADEON_TILING_MACRO)
		return ARRAY_2D_TILED_THIN1;
	else if (tiling_flags & RADEON_TILING_MICRO)
		return ARRAY_1D_TILED_THIN1;
	else
		return ARRAY_LINEAR_GENERAL;
}

static u32 evergreen_cs_get_num_banks(u32 nbanks)
{
	switch (nbanks) {
	case 2:
		return ADDR_SURF_2_BANK;
	case 4:
		return ADDR_SURF_4_BANK;
	case 8:
	default:
		return ADDR_SURF_8_BANK;
	case 16:
		return ADDR_SURF_16_BANK;
	}
}

static void evergreen_cs_track_init(struct evergreen_cs_track *track)
{
	int i;

	for (i = 0; i < 8; i++) {
		track->cb_color_fmask_bo[i] = NULL;
		track->cb_color_cmask_bo[i] = NULL;
		track->cb_color_cmask_slice[i] = 0;
		track->cb_color_fmask_slice[i] = 0;
	}

	for (i = 0; i < 12; i++) {
		track->cb_color_bo[i] = NULL;
		track->cb_color_bo_offset[i] = 0xFFFFFFFF;
		track->cb_color_info[i] = 0;
		track->cb_color_view[i] = 0xFFFFFFFF;
		track->cb_color_pitch[i] = 0;
		track->cb_color_slice[i] = 0xfffffff;
		track->cb_color_slice_idx[i] = 0;
	}
	track->cb_target_mask = 0xFFFFFFFF;
	track->cb_shader_mask = 0xFFFFFFFF;
	track->cb_dirty = true;

	track->db_depth_slice = 0xffffffff;
	track->db_depth_view = 0xFFFFC000;
	track->db_depth_size = 0xFFFFFFFF;
	track->db_depth_control = 0xFFFFFFFF;
	track->db_z_info = 0xFFFFFFFF;
	track->db_z_read_offset = 0xFFFFFFFF;
	track->db_z_write_offset = 0xFFFFFFFF;
	track->db_z_read_bo = NULL;
	track->db_z_write_bo = NULL;
	track->db_s_info = 0xFFFFFFFF;
	track->db_s_read_offset = 0xFFFFFFFF;
	track->db_s_write_offset = 0xFFFFFFFF;
	track->db_s_read_bo = NULL;
	track->db_s_write_bo = NULL;
	track->db_dirty = true;
	track->htile_bo = NULL;
	track->htile_offset = 0xFFFFFFFF;
	track->htile_surface = 0;

	for (i = 0; i < 4; i++) {
		track->vgt_strmout_size[i] = 0;
		track->vgt_strmout_bo[i] = NULL;
		track->vgt_strmout_bo_offset[i] = 0xFFFFFFFF;
	}
	track->streamout_dirty = true;
	track->sx_misc_kill_all_prims = false;
}

struct eg_surface {
	/* value gathered from cs */
	unsigned	nbx;
	unsigned	nby;
	unsigned	format;
	unsigned	mode;
	unsigned	nbanks;
	unsigned	bankw;
	unsigned	bankh;
	unsigned	tsplit;
	unsigned	mtilea;
	unsigned	nsamples;
	/* output value */
	unsigned	bpe;
	unsigned	layer_size;
	unsigned	palign;
	unsigned	halign;
	unsigned long	base_align;
};

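/*
 * Units used by the checks below: nbx/nby are the surface width and
 * height in elements (blocks, for block-compressed formats), bpe is
 * the size of one element in bytes, palign/halign are the required
 * pitch and height alignment in elements, and layer_size/base_align
 * are byte values.
 */
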
static int evergreen_surface_check_linear(struct radeon_cs_parser *p,
					  struct eg_surface *surf,
					  const char *prefix)
{
	surf->layer_size = surf->nbx * surf->nby * surf->bpe * surf->nsamples;
	surf->base_align = surf->bpe;
	surf->palign = 1;
	surf->halign = 1;
	return 0;
}

static int evergreen_surface_check_linear_aligned(struct radeon_cs_parser *p,
						  struct eg_surface *surf,
						  const char *prefix)
{
	struct evergreen_cs_track *track = p->track;
	unsigned palign;

	palign = MAX(64, track->group_size / surf->bpe);
	surf->layer_size = surf->nbx * surf->nby * surf->bpe * surf->nsamples;
	surf->base_align = track->group_size;
	surf->palign = palign;
	surf->halign = 1;
	if (surf->nbx & (palign - 1)) {
		if (prefix) {
			dev_warn(p->dev, "%s:%d %s pitch %d invalid must be aligned with %d\n",
				 __func__, __LINE__, prefix, surf->nbx, palign);
		}
		return -EINVAL;
	}
	return 0;
}

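/*
 * Worked example for the LINEAR_ALIGNED rule above (illustrative
 * numbers, not read from any particular chip): with a 256-byte memory
 * group and a 4-byte-per-element format, the pitch must be a multiple
 * of MAX(64, 256 / 4) = 64 elements and the surface base must sit on
 * a 256-byte boundary.
 */
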
static int evergreen_surface_check_1d(struct radeon_cs_parser *p,
				      struct eg_surface *surf,
				      const char *prefix)
{
	struct evergreen_cs_track *track = p->track;
	unsigned palign;

	palign = track->group_size / (8 * surf->bpe * surf->nsamples);
	palign = MAX(8, palign);
	surf->layer_size = surf->nbx * surf->nby * surf->bpe;
	surf->base_align = track->group_size;
	surf->palign = palign;
	surf->halign = 8;
	if ((surf->nbx & (palign - 1))) {
		if (prefix) {
			dev_warn(p->dev, "%s:%d %s pitch %d invalid must be aligned with %d (%d %d %d)\n",
				 __func__, __LINE__, prefix, surf->nbx, palign,
				 track->group_size, surf->bpe, surf->nsamples);
		}
		return -EINVAL;
	}
	if ((surf->nby & (8 - 1))) {
		if (prefix) {
			dev_warn(p->dev, "%s:%d %s height %d invalid must be aligned with 8\n",
				 __func__, __LINE__, prefix, surf->nby);
		}
		return -EINVAL;
	}
	return 0;
}

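/*
 * Worked example for the 1D-tiled rule above (illustrative numbers):
 * micro tiles are 8x8 elements, so the height must be a multiple of 8,
 * and with a 256-byte group, 4-byte elements and one sample the pitch
 * must be a multiple of MAX(8, 256 / (8 * 4 * 1)) = 8 elements.
 */
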
static int evergreen_surface_check_2d(struct radeon_cs_parser *p,
				      struct eg_surface *surf,
				      const char *prefix)
{
	struct evergreen_cs_track *track = p->track;
	unsigned palign, halign, tileb, slice_pt;
	unsigned mtile_pr, mtile_ps, mtileb;

	tileb = 64 * surf->bpe * surf->nsamples;
	slice_pt = 1;
	if (tileb > surf->tsplit) {
		slice_pt = tileb / surf->tsplit;
	}
	tileb = tileb / slice_pt;
	/* macro tile width & height */
	palign = (8 * surf->bankw * track->npipes) * surf->mtilea;
	halign = (8 * surf->bankh * surf->nbanks) / surf->mtilea;
	mtileb = (palign / 8) * (halign / 8) * tileb;
	mtile_pr = surf->nbx / palign;
	mtile_ps = (mtile_pr * surf->nby) / halign;
	surf->layer_size = mtile_ps * mtileb * slice_pt;
	surf->base_align = (palign / 8) * (halign / 8) * tileb;
	surf->palign = palign;
	surf->halign = halign;

	if ((surf->nbx & (palign - 1))) {
		if (prefix) {
			dev_warn(p->dev, "%s:%d %s pitch %d invalid must be aligned with %d\n",
				 __func__, __LINE__, prefix, surf->nbx, palign);
		}
		return -EINVAL;
	}
	if ((surf->nby & (halign - 1))) {
		if (prefix) {
			dev_warn(p->dev, "%s:%d %s height %d invalid must be aligned with %d\n",
				 __func__, __LINE__, prefix, surf->nby, halign);
		}
		return -EINVAL;
	}

	return 0;
}

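/*
 * Worked example for the 2D-tiled math above (illustrative values, not
 * taken from a real board): with bpe = 4 and nsamples = 1, an 8x8 tile
 * holds tileb = 64 * 4 = 256 bytes, which fits a 256-byte tile split,
 * so slice_pt stays 1.  With bankw = 1, bankh = 1, npipes = 4,
 * nbanks = 8 and mtilea = 2, a macro tile is
 * palign = (8 * 1 * 4) * 2 = 64 elements wide,
 * halign = (8 * 1 * 8) / 2 = 32 elements tall, and
 * mtileb = (64 / 8) * (32 / 8) * 256 = 8192 bytes.
 */
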
static int evergreen_surface_check(struct radeon_cs_parser *p,
				   struct eg_surface *surf,
				   const char *prefix)
{
	/* some common value computed here */
	surf->bpe = r600_fmt_get_blocksize(surf->format);

	switch (surf->mode) {
	case ARRAY_LINEAR_GENERAL:
		return evergreen_surface_check_linear(p, surf, prefix);
	case ARRAY_LINEAR_ALIGNED:
		return evergreen_surface_check_linear_aligned(p, surf, prefix);
	case ARRAY_1D_TILED_THIN1:
		return evergreen_surface_check_1d(p, surf, prefix);
	case ARRAY_2D_TILED_THIN1:
		return evergreen_surface_check_2d(p, surf, prefix);
	default:
		dev_warn(p->dev, "%s:%d %s invalid array mode %d\n",
			 __func__, __LINE__, prefix, surf->mode);
		return -EINVAL;
	}
	return -EINVAL;
}

static int evergreen_surface_value_conv_check(struct radeon_cs_parser *p,
					      struct eg_surface *surf,
					      const char *prefix)
{
	switch (surf->mode) {
	case ARRAY_2D_TILED_THIN1:
		break;
	case ARRAY_LINEAR_GENERAL:
	case ARRAY_LINEAR_ALIGNED:
	case ARRAY_1D_TILED_THIN1:
		return 0;
	default:
		dev_warn(p->dev, "%s:%d %s invalid array mode %d\n",
			 __func__, __LINE__, prefix, surf->mode);
		return -EINVAL;
	}

	switch (surf->nbanks) {
	case 0: surf->nbanks = 2; break;
	case 1: surf->nbanks = 4; break;
	case 2: surf->nbanks = 8; break;
	case 3: surf->nbanks = 16; break;
	default:
		dev_warn(p->dev, "%s:%d %s invalid number of banks %d\n",
			 __func__, __LINE__, prefix, surf->nbanks);
		return -EINVAL;
	}
	switch (surf->bankw) {
	case 0: surf->bankw = 1; break;
	case 1: surf->bankw = 2; break;
	case 2: surf->bankw = 4; break;
	case 3: surf->bankw = 8; break;
	default:
		dev_warn(p->dev, "%s:%d %s invalid bankw %d\n",
			 __func__, __LINE__, prefix, surf->bankw);
		return -EINVAL;
	}
	switch (surf->bankh) {
	case 0: surf->bankh = 1; break;
	case 1: surf->bankh = 2; break;
	case 2: surf->bankh = 4; break;
	case 3: surf->bankh = 8; break;
	default:
		dev_warn(p->dev, "%s:%d %s invalid bankh %d\n",
			 __func__, __LINE__, prefix, surf->bankh);
		return -EINVAL;
	}
	switch (surf->mtilea) {
	case 0: surf->mtilea = 1; break;
	case 1: surf->mtilea = 2; break;
	case 2: surf->mtilea = 4; break;
	case 3: surf->mtilea = 8; break;
	default:
		dev_warn(p->dev, "%s:%d %s invalid macro tile aspect %d\n",
			 __func__, __LINE__, prefix, surf->mtilea);
		return -EINVAL;
	}
	switch (surf->tsplit) {
	case 0: surf->tsplit = 64; break;
	case 1: surf->tsplit = 128; break;
	case 2: surf->tsplit = 256; break;
	case 3: surf->tsplit = 512; break;
	case 4: surf->tsplit = 1024; break;
	case 5: surf->tsplit = 2048; break;
	case 6: surf->tsplit = 4096; break;
	default:
		dev_warn(p->dev, "%s:%d %s invalid tile split %d\n",
			 __func__, __LINE__, prefix, surf->tsplit);
		return -EINVAL;
	}
	return 0;
}

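/*
 * The switches above just undo the hardware field encodings: NUM_BANKS
 * is stored as log2(banks) - 1 (0..3 -> 2..16 banks), BANK_WIDTH,
 * BANK_HEIGHT and MACRO_TILE_ASPECT as log2 (0..3 -> 1..8), and
 * TILE_SPLIT as 64 << field (0..6 -> 64..4096 bytes).
 */
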
static int evergreen_cs_track_validate_cb(struct radeon_cs_parser *p, unsigned id)
{
	struct evergreen_cs_track *track = p->track;
	struct eg_surface surf;
	unsigned pitch, slice, mslice;
	unsigned long offset;
	int r;

	mslice = G_028C6C_SLICE_MAX(track->cb_color_view[id]) + 1;
	pitch = track->cb_color_pitch[id];
	slice = track->cb_color_slice[id];
	surf.nbx = (pitch + 1) * 8;
	surf.nby = ((slice + 1) * 64) / surf.nbx;
	surf.mode = G_028C70_ARRAY_MODE(track->cb_color_info[id]);
	surf.format = G_028C70_FORMAT(track->cb_color_info[id]);
	surf.tsplit = G_028C74_TILE_SPLIT(track->cb_color_attrib[id]);
	surf.nbanks = G_028C74_NUM_BANKS(track->cb_color_attrib[id]);
	surf.bankw = G_028C74_BANK_WIDTH(track->cb_color_attrib[id]);
	surf.bankh = G_028C74_BANK_HEIGHT(track->cb_color_attrib[id]);
	surf.mtilea = G_028C74_MACRO_TILE_ASPECT(track->cb_color_attrib[id]);
	surf.nsamples = 1;

	if (!r600_fmt_is_valid_color(surf.format)) {
		dev_warn(p->dev, "%s:%d cb invalid format %d for %d (0x%08x)\n",
			 __func__, __LINE__, surf.format,
			 id, track->cb_color_info[id]);
		return -EINVAL;
	}

	r = evergreen_surface_value_conv_check(p, &surf, "cb");
	if (r) {
		return r;
	}

	r = evergreen_surface_check(p, &surf, "cb");
	if (r) {
		dev_warn(p->dev, "%s:%d cb[%d] invalid (0x%08x 0x%08x 0x%08x 0x%08x)\n",
			 __func__, __LINE__, id, track->cb_color_pitch[id],
			 track->cb_color_slice[id], track->cb_color_attrib[id],
			 track->cb_color_info[id]);
		return r;
	}

	offset = track->cb_color_bo_offset[id] << 8;
	if (offset & (surf.base_align - 1)) {
		dev_warn(p->dev, "%s:%d cb[%d] bo base %ld not aligned with %ld\n",
			 __func__, __LINE__, id, offset, surf.base_align);
		return -EINVAL;
	}

	offset += surf.layer_size * mslice;
	if (offset > radeon_bo_size(track->cb_color_bo[id])) {
		/* old ddx are broken they allocate bo with w*h*bpp but
		 * program slice with ALIGN(h, 8), catch this and patch
		 * command stream.
		 */
		if (!surf.mode) {
			volatile u32 *ib = p->ib.ptr;
			unsigned long tmp, nby, bsize, size, min = 0;

			/* find the height the ddx wants */
			if (surf.nby > 8) {
				min = surf.nby - 8;
			}
			bsize = radeon_bo_size(track->cb_color_bo[id]);
			tmp = track->cb_color_bo_offset[id] << 8;
			for (nby = surf.nby; nby > min; nby--) {
				size = nby * surf.nbx * surf.bpe * surf.nsamples;
				if ((tmp + size * mslice) <= bsize) {
					break;
				}
			}
			if (nby > min) {
				surf.nby = nby;
				slice = ((nby * surf.nbx) / 64) - 1;
				if (!evergreen_surface_check(p, &surf, "cb")) {
					/* check if this one works */
					tmp += surf.layer_size * mslice;
					if (tmp <= bsize) {
						ib[track->cb_color_slice_idx[id]] = slice;
						goto old_ddx_ok;
					}
				}
			}
		}
		dev_warn(p->dev, "%s:%d cb[%d] bo too small (layer size %d, "
			 "offset %d, max layer %d, bo size %ld, slice %d)\n",
			 __func__, __LINE__, id, surf.layer_size,
			 track->cb_color_bo_offset[id] << 8, mslice,
			 radeon_bo_size(track->cb_color_bo[id]), slice);
		dev_warn(p->dev, "%s:%d problematic surf: (%d %d) (%d %d %d %d %d %d %d)\n",
			 __func__, __LINE__, surf.nbx, surf.nby,
			 surf.mode, surf.bpe, surf.nsamples,
			 surf.bankw, surf.bankh,
			 surf.tsplit, surf.mtilea);
		return -EINVAL;
	}
old_ddx_ok:

	return 0;
}

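/*
 * How the pitch/slice registers above encode geometry (hypothetical
 * 1920x1080 render target, height padded to 1088): CB_COLORn_PITCH
 * holds width / 8 - 1 = 239, so nbx = (239 + 1) * 8 = 1920;
 * CB_COLORn_SLICE holds width * height / 64 - 1 = 32639, so
 * nby = ((32639 + 1) * 64) / 1920 = 1088.
 */
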
static int evergreen_cs_track_validate_htile(struct radeon_cs_parser *p,
					     unsigned nbx, unsigned nby)
{
	struct evergreen_cs_track *track = p->track;
	unsigned long size;

	if (track->htile_bo == NULL) {
		dev_warn(p->dev, "%s:%d htile enabled without htile surface 0x%08x\n",
			 __func__, __LINE__, track->db_z_info);
		return -EINVAL;
	}

	if (G_028ABC_LINEAR(track->htile_surface)) {
		/* pitch must be 16 htiles aligned == 16 * 8 pixel aligned */
		nbx = round_up(nbx, 16 * 8);
		/* height is npipes htiles aligned == npipes * 8 pixel aligned */
		nby = round_up(nby, track->npipes * 8);
	} else {
		/* always assume 8x8 htile */
		/* align is htile align * 8, htile align vary according to
		 * number of pipe and tile width and nby
		 */
		switch (track->npipes) {
		case 8:
			/* HTILE_WIDTH = 8 & HTILE_HEIGHT = 8 */
			nbx = round_up(nbx, 64 * 8);
			nby = round_up(nby, 64 * 8);
			break;
		case 4:
			/* HTILE_WIDTH = 8 & HTILE_HEIGHT = 8 */
			nbx = round_up(nbx, 64 * 8);
			nby = round_up(nby, 32 * 8);
			break;
		case 2:
			/* HTILE_WIDTH = 8 & HTILE_HEIGHT = 8 */
			nbx = round_up(nbx, 32 * 8);
			nby = round_up(nby, 32 * 8);
			break;
		case 1:
			/* HTILE_WIDTH = 8 & HTILE_HEIGHT = 8 */
			nbx = round_up(nbx, 32 * 8);
			nby = round_up(nby, 16 * 8);
			break;
		default:
			dev_warn(p->dev, "%s:%d invalid num pipes %d\n",
				 __func__, __LINE__, track->npipes);
			return -EINVAL;
		}
	}
	/* compute number of htile */
	nbx = nbx >> 3;
	nby = nby >> 3;
	/* size must be aligned on npipes * 2K boundary */
	size = roundup(nbx * nby * 4, track->npipes * (2 << 10));
	size += track->htile_offset;

	if (size > radeon_bo_size(track->htile_bo)) {
		dev_warn(p->dev, "%s:%d htile surface too small %ld for %ld (%d %d)\n",
			 __func__, __LINE__, radeon_bo_size(track->htile_bo),
			 size, nbx, nby);
		return -EINVAL;
	}
	return 0;
}

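/*
 * HTILE sizing, worked through for an assumed 4-pipe part with a
 * 1920x1088 tiled depth buffer: nbx rounds up to 2048 (64 * 8) and nby
 * to 1280 (32 * 8), giving 256 x 160 htiles of 4 bytes each, so
 * size = roundup(256 * 160 * 4, 4 * 2048) = 163840 bytes before
 * htile_offset is added.
 */
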
static int evergreen_cs_track_validate_stencil(struct radeon_cs_parser *p)
{
	struct evergreen_cs_track *track = p->track;
	struct eg_surface surf;
	unsigned pitch, slice, mslice;
	unsigned long offset;
	int r;

	mslice = G_028008_SLICE_MAX(track->db_depth_view) + 1;
	pitch = G_028058_PITCH_TILE_MAX(track->db_depth_size);
	slice = track->db_depth_slice;
	surf.nbx = (pitch + 1) * 8;
	surf.nby = ((slice + 1) * 64) / surf.nbx;
	surf.mode = G_028040_ARRAY_MODE(track->db_z_info);
	surf.format = G_028044_FORMAT(track->db_s_info);
	surf.tsplit = G_028044_TILE_SPLIT(track->db_s_info);
	surf.nbanks = G_028040_NUM_BANKS(track->db_z_info);
	surf.bankw = G_028040_BANK_WIDTH(track->db_z_info);
	surf.bankh = G_028040_BANK_HEIGHT(track->db_z_info);
	surf.mtilea = G_028040_MACRO_TILE_ASPECT(track->db_z_info);
	surf.nsamples = 1;

	if (surf.format != 1) {
		dev_warn(p->dev, "%s:%d stencil invalid format %d\n",
			 __func__, __LINE__, surf.format);
		return -EINVAL;
	}
	/* replace by color format so we can use same code */
	surf.format = V_028C70_COLOR_8;

	r = evergreen_surface_value_conv_check(p, &surf, "stencil");
	if (r) {
		return r;
	}

	r = evergreen_surface_check(p, &surf, NULL);
	if (r) {
		/* old userspace doesn't compute proper depth/stencil alignment
		 * check that alignment against a bigger byte per elements and
		 * only report if that alignment is wrong too.
		 */
		surf.format = V_028C70_COLOR_8_8_8_8;
		r = evergreen_surface_check(p, &surf, "stencil");
		if (r) {
			dev_warn(p->dev, "%s:%d stencil invalid (0x%08x 0x%08x 0x%08x 0x%08x)\n",
				 __func__, __LINE__, track->db_depth_size,
				 track->db_depth_slice, track->db_s_info, track->db_z_info);
		}
		return r;
	}

	offset = track->db_s_read_offset << 8;
	if (offset & (surf.base_align - 1)) {
		dev_warn(p->dev, "%s:%d stencil read bo base %ld not aligned with %ld\n",
			 __func__, __LINE__, offset, surf.base_align);
		return -EINVAL;
	}
	offset += surf.layer_size * mslice;
	if (offset > radeon_bo_size(track->db_s_read_bo)) {
		dev_warn(p->dev, "%s:%d stencil read bo too small (layer size %d, "
			 "offset %ld, max layer %d, bo size %ld)\n",
			 __func__, __LINE__, surf.layer_size,
			 (unsigned long)track->db_s_read_offset << 8, mslice,
			 radeon_bo_size(track->db_s_read_bo));
		dev_warn(p->dev, "%s:%d stencil invalid (0x%08x 0x%08x 0x%08x 0x%08x)\n",
			 __func__, __LINE__, track->db_depth_size,
			 track->db_depth_slice, track->db_s_info, track->db_z_info);
		return -EINVAL;
	}

	offset = track->db_s_write_offset << 8;
	if (offset & (surf.base_align - 1)) {
		dev_warn(p->dev, "%s:%d stencil write bo base %ld not aligned with %ld\n",
			 __func__, __LINE__, offset, surf.base_align);
		return -EINVAL;
	}
	offset += surf.layer_size * mslice;
	if (offset > radeon_bo_size(track->db_s_write_bo)) {
		dev_warn(p->dev, "%s:%d stencil write bo too small (layer size %d, "
			 "offset %ld, max layer %d, bo size %ld)\n",
			 __func__, __LINE__, surf.layer_size,
			 (unsigned long)track->db_s_write_offset << 8, mslice,
			 radeon_bo_size(track->db_s_write_bo));
		return -EINVAL;
	}

	/* hyperz */
	if (G_028040_TILE_SURFACE_ENABLE(track->db_z_info)) {
		r = evergreen_cs_track_validate_htile(p, surf.nbx, surf.nby);
		if (r) {
			return r;
		}
	}

	return 0;
}

static int evergreen_cs_track_validate_depth(struct radeon_cs_parser *p)
{
	struct evergreen_cs_track *track = p->track;
	struct eg_surface surf;
	unsigned pitch, slice, mslice;
	unsigned long offset;
	int r;

	mslice = G_028008_SLICE_MAX(track->db_depth_view) + 1;
	pitch = G_028058_PITCH_TILE_MAX(track->db_depth_size);
	slice = track->db_depth_slice;
	surf.nbx = (pitch + 1) * 8;
	surf.nby = ((slice + 1) * 64) / surf.nbx;
	surf.mode = G_028040_ARRAY_MODE(track->db_z_info);
	surf.format = G_028040_FORMAT(track->db_z_info);
	surf.tsplit = G_028040_TILE_SPLIT(track->db_z_info);
	surf.nbanks = G_028040_NUM_BANKS(track->db_z_info);
	surf.bankw = G_028040_BANK_WIDTH(track->db_z_info);
	surf.bankh = G_028040_BANK_HEIGHT(track->db_z_info);
	surf.mtilea = G_028040_MACRO_TILE_ASPECT(track->db_z_info);
	surf.nsamples = 1;

	switch (surf.format) {
	case V_028040_Z_16:
		surf.format = V_028C70_COLOR_16;
		break;
	case V_028040_Z_24:
	case V_028040_Z_32_FLOAT:
		surf.format = V_028C70_COLOR_8_8_8_8;
		break;
	default:
		dev_warn(p->dev, "%s:%d depth invalid format %d\n",
			 __func__, __LINE__, surf.format);
		return -EINVAL;
	}

	r = evergreen_surface_value_conv_check(p, &surf, "depth");
	if (r) {
		dev_warn(p->dev, "%s:%d depth invalid (0x%08x 0x%08x 0x%08x)\n",
			 __func__, __LINE__, track->db_depth_size,
			 track->db_depth_slice, track->db_z_info);
		return r;
	}

	r = evergreen_surface_check(p, &surf, "depth");
	if (r) {
		dev_warn(p->dev, "%s:%d depth invalid (0x%08x 0x%08x 0x%08x)\n",
			 __func__, __LINE__, track->db_depth_size,
			 track->db_depth_slice, track->db_z_info);
		return r;
	}

	offset = track->db_z_read_offset << 8;
	if (offset & (surf.base_align - 1)) {
		dev_warn(p->dev, "%s:%d depth read bo base %ld not aligned with %ld\n",
			 __func__, __LINE__, offset, surf.base_align);
		return -EINVAL;
	}
	offset += surf.layer_size * mslice;
	if (offset > radeon_bo_size(track->db_z_read_bo)) {
		dev_warn(p->dev, "%s:%d depth read bo too small (layer size %d, "
			 "offset %ld, max layer %d, bo size %ld)\n",
			 __func__, __LINE__, surf.layer_size,
			 (unsigned long)track->db_z_read_offset << 8, mslice,
			 radeon_bo_size(track->db_z_read_bo));
		return -EINVAL;
	}

	offset = track->db_z_write_offset << 8;
	if (offset & (surf.base_align - 1)) {
		dev_warn(p->dev, "%s:%d depth write bo base %ld not aligned with %ld\n",
			 __func__, __LINE__, offset, surf.base_align);
		return -EINVAL;
	}
	offset += surf.layer_size * mslice;
	if (offset > radeon_bo_size(track->db_z_write_bo)) {
		dev_warn(p->dev, "%s:%d depth write bo too small (layer size %d, "
			 "offset %ld, max layer %d, bo size %ld)\n",
			 __func__, __LINE__, surf.layer_size,
			 (unsigned long)track->db_z_write_offset << 8, mslice,
			 radeon_bo_size(track->db_z_write_bo));
		return -EINVAL;
	}

	/* hyperz */
	if (G_028040_TILE_SURFACE_ENABLE(track->db_z_info)) {
		r = evergreen_cs_track_validate_htile(p, surf.nbx, surf.nby);
		if (r) {
			return r;
		}
	}

	return 0;
}

752static int evergreen_cs_track_validate_texture(struct radeon_cs_parser *p,
753 struct radeon_bo *texture,
754 struct radeon_bo *mipmap,
755 unsigned idx)
756{
757 struct eg_surface surf;
758 unsigned long toffset, moffset;
759 unsigned dim, llevel, mslice, width, height, depth, i;
Dan Carpenter42b923b2012-02-14 10:38:11 +0300760 u32 texdw[8];
Jerome Glisse285484e2011-12-16 17:03:42 -0500761 int r;
762
763 texdw[0] = radeon_get_ib_value(p, idx + 0);
764 texdw[1] = radeon_get_ib_value(p, idx + 1);
765 texdw[2] = radeon_get_ib_value(p, idx + 2);
766 texdw[3] = radeon_get_ib_value(p, idx + 3);
767 texdw[4] = radeon_get_ib_value(p, idx + 4);
768 texdw[5] = radeon_get_ib_value(p, idx + 5);
769 texdw[6] = radeon_get_ib_value(p, idx + 6);
770 texdw[7] = radeon_get_ib_value(p, idx + 7);
771 dim = G_030000_DIM(texdw[0]);
772 llevel = G_030014_LAST_LEVEL(texdw[5]);
773 mslice = G_030014_LAST_ARRAY(texdw[5]) + 1;
774 width = G_030000_TEX_WIDTH(texdw[0]) + 1;
775 height = G_030004_TEX_HEIGHT(texdw[1]) + 1;
776 depth = G_030004_TEX_DEPTH(texdw[1]) + 1;
777 surf.format = G_03001C_DATA_FORMAT(texdw[7]);
778 surf.nbx = (G_030000_PITCH(texdw[0]) + 1) * 8;
779 surf.nbx = r600_fmt_get_nblocksx(surf.format, surf.nbx);
780 surf.nby = r600_fmt_get_nblocksy(surf.format, height);
781 surf.mode = G_030004_ARRAY_MODE(texdw[1]);
782 surf.tsplit = G_030018_TILE_SPLIT(texdw[6]);
783 surf.nbanks = G_03001C_NUM_BANKS(texdw[7]);
784 surf.bankw = G_03001C_BANK_WIDTH(texdw[7]);
785 surf.bankh = G_03001C_BANK_HEIGHT(texdw[7]);
786 surf.mtilea = G_03001C_MACRO_TILE_ASPECT(texdw[7]);
787 surf.nsamples = 1;
788 toffset = texdw[2] << 8;
789 moffset = texdw[3] << 8;
790
791 if (!r600_fmt_is_valid_texture(surf.format, p->family)) {
792 dev_warn(p->dev, "%s:%d texture invalid format %d\n",
793 __func__, __LINE__, surf.format);
794 return -EINVAL;
795 }
796 switch (dim) {
797 case V_030000_SQ_TEX_DIM_1D:
798 case V_030000_SQ_TEX_DIM_2D:
799 case V_030000_SQ_TEX_DIM_CUBEMAP:
800 case V_030000_SQ_TEX_DIM_1D_ARRAY:
801 case V_030000_SQ_TEX_DIM_2D_ARRAY:
802 depth = 1;
Marek Olšákb51ad122012-08-09 16:34:16 +0200803 break;
804 case V_030000_SQ_TEX_DIM_2D_MSAA:
805 case V_030000_SQ_TEX_DIM_2D_ARRAY_MSAA:
806 surf.nsamples = 1 << llevel;
807 llevel = 0;
808 depth = 1;
809 break;
Jerome Glisse285484e2011-12-16 17:03:42 -0500810 case V_030000_SQ_TEX_DIM_3D:
811 break;
812 default:
813 dev_warn(p->dev, "%s:%d texture invalid dimension %d\n",
814 __func__, __LINE__, dim);
815 return -EINVAL;
816 }
817
818 r = evergreen_surface_value_conv_check(p, &surf, "texture");
819 if (r) {
820 return r;
821 }
822
823 /* align height */
824 evergreen_surface_check(p, &surf, NULL);
825 surf.nby = ALIGN(surf.nby, surf.halign);
826
827 r = evergreen_surface_check(p, &surf, "texture");
828 if (r) {
829 dev_warn(p->dev, "%s:%d texture invalid 0x%08x 0x%08x 0x%08x 0x%08x 0x%08x 0x%08x\n",
830 __func__, __LINE__, texdw[0], texdw[1], texdw[4],
831 texdw[5], texdw[6], texdw[7]);
832 return r;
833 }
834
835 /* check texture size */
836 if (toffset & (surf.base_align - 1)) {
837 dev_warn(p->dev, "%s:%d texture bo base %ld not aligned with %ld\n",
838 __func__, __LINE__, toffset, surf.base_align);
839 return -EINVAL;
840 }
Marek Olšák774c3892013-03-01 13:40:31 +0100841 if (surf.nsamples <= 1 && moffset & (surf.base_align - 1)) {
Jerome Glisse285484e2011-12-16 17:03:42 -0500842 dev_warn(p->dev, "%s:%d mipmap bo base %ld not aligned with %ld\n",
843 __func__, __LINE__, moffset, surf.base_align);
844 return -EINVAL;
845 }
846 if (dim == SQ_TEX_DIM_3D) {
847 toffset += surf.layer_size * depth;
848 } else {
849 toffset += surf.layer_size * mslice;
850 }
851 if (toffset > radeon_bo_size(texture)) {
852 dev_warn(p->dev, "%s:%d texture bo too small (layer size %d, "
853 "offset %ld, max layer %d, depth %d, bo size %ld) (%d %d)\n",
854 __func__, __LINE__, surf.layer_size,
855 (unsigned long)texdw[2] << 8, mslice,
856 depth, radeon_bo_size(texture),
857 surf.nbx, surf.nby);
858 return -EINVAL;
859 }
860
Marek Olšák61051af2012-09-25 03:34:01 +0200861 if (!mipmap) {
862 if (llevel) {
863 dev_warn(p->dev, "%s:%i got NULL MIP_ADDRESS relocation\n",
864 __func__, __LINE__);
865 return -EINVAL;
866 } else {
867 return 0; /* everything's ok */
868 }
869 }
870
Jerome Glisse285484e2011-12-16 17:03:42 -0500871 /* check mipmap size */
872 for (i = 1; i <= llevel; i++) {
873 unsigned w, h, d;
874
875 w = r600_mip_minify(width, i);
876 h = r600_mip_minify(height, i);
877 d = r600_mip_minify(depth, i);
878 surf.nbx = r600_fmt_get_nblocksx(surf.format, w);
879 surf.nby = r600_fmt_get_nblocksy(surf.format, h);
880
881 switch (surf.mode) {
882 case ARRAY_2D_TILED_THIN1:
883 if (surf.nbx < surf.palign || surf.nby < surf.halign) {
884 surf.mode = ARRAY_1D_TILED_THIN1;
885 }
886 /* recompute alignment */
887 evergreen_surface_check(p, &surf, NULL);
888 break;
889 case ARRAY_LINEAR_GENERAL:
890 case ARRAY_LINEAR_ALIGNED:
891 case ARRAY_1D_TILED_THIN1:
892 break;
893 default:
894 dev_warn(p->dev, "%s:%d invalid array mode %d\n",
895 __func__, __LINE__, surf.mode);
896 return -EINVAL;
897 }
898 surf.nbx = ALIGN(surf.nbx, surf.palign);
899 surf.nby = ALIGN(surf.nby, surf.halign);
900
901 r = evergreen_surface_check(p, &surf, "mipmap");
902 if (r) {
903 return r;
904 }
905
906 if (dim == SQ_TEX_DIM_3D) {
907 moffset += surf.layer_size * d;
908 } else {
909 moffset += surf.layer_size * mslice;
910 }
911 if (moffset > radeon_bo_size(mipmap)) {
912 dev_warn(p->dev, "%s:%d mipmap [%d] bo too small (layer size %d, "
913 "offset %ld, coffset %ld, max layer %d, depth %d, "
914 "bo size %ld) level0 (%d %d %d)\n",
915 __func__, __LINE__, i, surf.layer_size,
916 (unsigned long)texdw[3] << 8, moffset, mslice,
917 d, radeon_bo_size(mipmap),
918 width, height, depth);
919 dev_warn(p->dev, "%s:%d problematic surf: (%d %d) (%d %d %d %d %d %d %d)\n",
920 __func__, __LINE__, surf.nbx, surf.nby,
921 surf.mode, surf.bpe, surf.nsamples,
922 surf.bankw, surf.bankh,
923 surf.tsplit, surf.mtilea);
924 return -EINVAL;
925 }
926 }
927
928 return 0;
929}
930
static int evergreen_cs_track_check(struct radeon_cs_parser *p)
{
	struct evergreen_cs_track *track = p->track;
	unsigned tmp, i;
	int r;
	unsigned buffer_mask = 0;

	/* check streamout */
	if (track->streamout_dirty && track->vgt_strmout_config) {
		for (i = 0; i < 4; i++) {
			if (track->vgt_strmout_config & (1 << i)) {
				buffer_mask |= (track->vgt_strmout_buffer_config >> (i * 4)) & 0xf;
			}
		}

		for (i = 0; i < 4; i++) {
			if (buffer_mask & (1 << i)) {
				if (track->vgt_strmout_bo[i]) {
					u64 offset = (u64)track->vgt_strmout_bo_offset[i] +
							(u64)track->vgt_strmout_size[i];
					if (offset > radeon_bo_size(track->vgt_strmout_bo[i])) {
						DRM_ERROR("streamout %d bo too small: 0x%llx, 0x%lx\n",
							  i, offset,
							  radeon_bo_size(track->vgt_strmout_bo[i]));
						return -EINVAL;
					}
				} else {
					dev_warn(p->dev, "No buffer for streamout %d\n", i);
					return -EINVAL;
				}
			}
		}
		track->streamout_dirty = false;
	}

	if (track->sx_misc_kill_all_prims)
		return 0;

	/* check that we have a cb for each enabled target
	 */
	if (track->cb_dirty) {
		tmp = track->cb_target_mask;
		for (i = 0; i < 8; i++) {
			u32 format = G_028C70_FORMAT(track->cb_color_info[i]);

			if (format != V_028C70_COLOR_INVALID &&
			    (tmp >> (i * 4)) & 0xF) {
				/* at least one component is enabled */
				if (track->cb_color_bo[i] == NULL) {
					dev_warn(p->dev, "%s:%d mask 0x%08X | 0x%08X no cb for %d\n",
						 __func__, __LINE__, track->cb_target_mask, track->cb_shader_mask, i);
					return -EINVAL;
				}
				/* check cb */
				r = evergreen_cs_track_validate_cb(p, i);
				if (r) {
					return r;
				}
			}
		}
		track->cb_dirty = false;
	}

	if (track->db_dirty) {
		/* Check stencil buffer */
		if (G_028044_FORMAT(track->db_s_info) != V_028044_STENCIL_INVALID &&
		    G_028800_STENCIL_ENABLE(track->db_depth_control)) {
			r = evergreen_cs_track_validate_stencil(p);
			if (r)
				return r;
		}
		/* Check depth buffer */
		if (G_028040_FORMAT(track->db_z_info) != V_028040_Z_INVALID &&
		    G_028800_Z_ENABLE(track->db_depth_control)) {
			r = evergreen_cs_track_validate_depth(p);
			if (r)
				return r;
		}
		track->db_dirty = false;
	}

	return 0;
}

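/*
 * Each dirty flag covers one group of tracked state: it is set when a
 * register write in that group is parsed (see evergreen_cs_handle_reg()
 * below) and cleared here once the group validates, so unchanged state
 * is not re-checked on every draw.
 */
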
/**
 * evergreen_cs_packet_parse_vline() - parse userspace VLINE packet
 * @p: parser structure holding parsing context.
 *
 * This is an Evergreen(+)-specific function for parsing VLINE packets.
 * Real work is done by the r600_cs_common_vline_parse function.
 * Here we just set up the ASIC-specific register table and call
 * the common implementation function.
 */
static int evergreen_cs_packet_parse_vline(struct radeon_cs_parser *p)
{

	static uint32_t vline_start_end[6] = {
		EVERGREEN_VLINE_START_END + EVERGREEN_CRTC0_REGISTER_OFFSET,
		EVERGREEN_VLINE_START_END + EVERGREEN_CRTC1_REGISTER_OFFSET,
		EVERGREEN_VLINE_START_END + EVERGREEN_CRTC2_REGISTER_OFFSET,
		EVERGREEN_VLINE_START_END + EVERGREEN_CRTC3_REGISTER_OFFSET,
		EVERGREEN_VLINE_START_END + EVERGREEN_CRTC4_REGISTER_OFFSET,
		EVERGREEN_VLINE_START_END + EVERGREEN_CRTC5_REGISTER_OFFSET
	};
	static uint32_t vline_status[6] = {
		EVERGREEN_VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET,
		EVERGREEN_VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET,
		EVERGREEN_VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET,
		EVERGREEN_VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET,
		EVERGREEN_VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET,
		EVERGREEN_VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET
	};

	return r600_cs_common_vline_parse(p, vline_start_end, vline_status);
}

static int evergreen_packet0_check(struct radeon_cs_parser *p,
				   struct radeon_cs_packet *pkt,
				   unsigned idx, unsigned reg)
{
	int r;

	switch (reg) {
	case EVERGREEN_VLINE_START_END:
		r = evergreen_cs_packet_parse_vline(p);
		if (r) {
			DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
				  idx, reg);
			return r;
		}
		break;
	default:
		printk(KERN_ERR "Forbidden register 0x%04X in cs at %d\n",
		       reg, idx);
		return -EINVAL;
	}
	return 0;
}

static int evergreen_cs_parse_packet0(struct radeon_cs_parser *p,
				      struct radeon_cs_packet *pkt)
{
	unsigned reg, i;
	unsigned idx;
	int r;

	idx = pkt->idx + 1;
	reg = pkt->reg;
	for (i = 0; i <= pkt->count; i++, idx++, reg += 4) {
		r = evergreen_packet0_check(p, pkt, idx, reg);
		if (r) {
			return r;
		}
	}
	return 0;
}

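/*
 * A type-0 packet writes pkt->count + 1 consecutive dwords starting at
 * register pkt->reg, which is why the loop above advances reg by 4
 * bytes per payload dword and vets each target register individually.
 */
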
/**
 * evergreen_cs_handle_reg() - process registers that need special handling.
 * @p: parser structure holding parsing context
 * @reg: register we are testing
 * @idx: index into the cs buffer
 */
static int evergreen_cs_handle_reg(struct radeon_cs_parser *p, u32 reg, u32 idx)
{
	struct evergreen_cs_track *track = (struct evergreen_cs_track *)p->track;
	struct radeon_bo_list *reloc;
	u32 tmp, *ib;
	int r;

	ib = p->ib.ptr;
	switch (reg) {
	/* force following reg to 0 in an attempt to disable out buffer
	 * which will need us to better understand how it works to perform
	 * security check on it (Jerome)
	 */
	case SQ_ESGS_RING_SIZE:
	case SQ_GSVS_RING_SIZE:
	case SQ_ESTMP_RING_SIZE:
	case SQ_GSTMP_RING_SIZE:
	case SQ_HSTMP_RING_SIZE:
	case SQ_LSTMP_RING_SIZE:
	case SQ_PSTMP_RING_SIZE:
	case SQ_VSTMP_RING_SIZE:
	case SQ_ESGS_RING_ITEMSIZE:
	case SQ_ESTMP_RING_ITEMSIZE:
	case SQ_GSTMP_RING_ITEMSIZE:
	case SQ_GSVS_RING_ITEMSIZE:
	case SQ_GS_VERT_ITEMSIZE:
	case SQ_GS_VERT_ITEMSIZE_1:
	case SQ_GS_VERT_ITEMSIZE_2:
	case SQ_GS_VERT_ITEMSIZE_3:
	case SQ_GSVS_RING_OFFSET_1:
	case SQ_GSVS_RING_OFFSET_2:
	case SQ_GSVS_RING_OFFSET_3:
	case SQ_HSTMP_RING_ITEMSIZE:
	case SQ_LSTMP_RING_ITEMSIZE:
	case SQ_PSTMP_RING_ITEMSIZE:
	case SQ_VSTMP_RING_ITEMSIZE:
	case VGT_TF_RING_SIZE:
		/* get value to populate the IB don't remove */
		/*tmp =radeon_get_ib_value(p, idx);
		ib[idx] = 0;*/
		break;
	case SQ_ESGS_RING_BASE:
	case SQ_GSVS_RING_BASE:
	case SQ_ESTMP_RING_BASE:
	case SQ_GSTMP_RING_BASE:
	case SQ_HSTMP_RING_BASE:
	case SQ_LSTMP_RING_BASE:
	case SQ_PSTMP_RING_BASE:
	case SQ_VSTMP_RING_BASE:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		break;
	case DB_DEPTH_CONTROL:
		track->db_depth_control = radeon_get_ib_value(p, idx);
		track->db_dirty = true;
		break;
	case CAYMAN_DB_EQAA:
		if (p->rdev->family < CHIP_CAYMAN) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
				 "0x%04X\n", reg);
			return -EINVAL;
		}
		break;
	case CAYMAN_DB_DEPTH_INFO:
		if (p->rdev->family < CHIP_CAYMAN) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
				 "0x%04X\n", reg);
			return -EINVAL;
		}
		break;
	case DB_Z_INFO:
		track->db_z_info = radeon_get_ib_value(p, idx);
		if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				dev_warn(p->dev, "bad SET_CONTEXT_REG "
						"0x%04X\n", reg);
				return -EINVAL;
			}
			ib[idx] &= ~Z_ARRAY_MODE(0xf);
			track->db_z_info &= ~Z_ARRAY_MODE(0xf);
			ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
			track->db_z_info |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
			if (reloc->tiling_flags & RADEON_TILING_MACRO) {
				unsigned bankw, bankh, mtaspect, tile_split;

				evergreen_tiling_fields(reloc->tiling_flags,
							&bankw, &bankh, &mtaspect,
							&tile_split);
				ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));
				ib[idx] |= DB_TILE_SPLIT(tile_split) |
						DB_BANK_WIDTH(bankw) |
						DB_BANK_HEIGHT(bankh) |
						DB_MACRO_TILE_ASPECT(mtaspect);
			}
		}
		track->db_dirty = true;
		break;
	case DB_STENCIL_INFO:
		track->db_s_info = radeon_get_ib_value(p, idx);
		track->db_dirty = true;
		break;
	case DB_DEPTH_VIEW:
		track->db_depth_view = radeon_get_ib_value(p, idx);
		track->db_dirty = true;
		break;
	case DB_DEPTH_SIZE:
		track->db_depth_size = radeon_get_ib_value(p, idx);
		track->db_dirty = true;
		break;
	case R_02805C_DB_DEPTH_SLICE:
		track->db_depth_slice = radeon_get_ib_value(p, idx);
		track->db_dirty = true;
		break;
	case DB_Z_READ_BASE:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		track->db_z_read_offset = radeon_get_ib_value(p, idx);
		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		track->db_z_read_bo = reloc->robj;
		track->db_dirty = true;
		break;
	case DB_Z_WRITE_BASE:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		track->db_z_write_offset = radeon_get_ib_value(p, idx);
		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		track->db_z_write_bo = reloc->robj;
		track->db_dirty = true;
		break;
	case DB_STENCIL_READ_BASE:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		track->db_s_read_offset = radeon_get_ib_value(p, idx);
		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		track->db_s_read_bo = reloc->robj;
		track->db_dirty = true;
		break;
	case DB_STENCIL_WRITE_BASE:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		track->db_s_write_offset = radeon_get_ib_value(p, idx);
		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		track->db_s_write_bo = reloc->robj;
		track->db_dirty = true;
		break;
	case VGT_STRMOUT_CONFIG:
		track->vgt_strmout_config = radeon_get_ib_value(p, idx);
		track->streamout_dirty = true;
		break;
	case VGT_STRMOUT_BUFFER_CONFIG:
		track->vgt_strmout_buffer_config = radeon_get_ib_value(p, idx);
		track->streamout_dirty = true;
		break;
	case VGT_STRMOUT_BUFFER_BASE_0:
	case VGT_STRMOUT_BUFFER_BASE_1:
	case VGT_STRMOUT_BUFFER_BASE_2:
	case VGT_STRMOUT_BUFFER_BASE_3:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		tmp = (reg - VGT_STRMOUT_BUFFER_BASE_0) / 16;
		track->vgt_strmout_bo_offset[tmp] = radeon_get_ib_value(p, idx) << 8;
		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		track->vgt_strmout_bo[tmp] = reloc->robj;
		track->streamout_dirty = true;
		break;
	case VGT_STRMOUT_BUFFER_SIZE_0:
	case VGT_STRMOUT_BUFFER_SIZE_1:
	case VGT_STRMOUT_BUFFER_SIZE_2:
	case VGT_STRMOUT_BUFFER_SIZE_3:
		tmp = (reg - VGT_STRMOUT_BUFFER_SIZE_0) / 16;
		/* size in register is DWs, convert to bytes */
		track->vgt_strmout_size[tmp] = radeon_get_ib_value(p, idx) * 4;
		track->streamout_dirty = true;
		break;
	case CP_COHER_BASE:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "missing reloc for CP_COHER_BASE "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		break;
	case CB_TARGET_MASK:
		track->cb_target_mask = radeon_get_ib_value(p, idx);
		track->cb_dirty = true;
		break;
	case CB_SHADER_MASK:
		track->cb_shader_mask = radeon_get_ib_value(p, idx);
		track->cb_dirty = true;
		break;
	case PA_SC_AA_CONFIG:
		if (p->rdev->family >= CHIP_CAYMAN) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
				 "0x%04X\n", reg);
			return -EINVAL;
		}
		tmp = radeon_get_ib_value(p, idx) & MSAA_NUM_SAMPLES_MASK;
		track->nsamples = 1 << tmp;
		break;
	case CAYMAN_PA_SC_AA_CONFIG:
		if (p->rdev->family < CHIP_CAYMAN) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
				 "0x%04X\n", reg);
			return -EINVAL;
		}
		tmp = radeon_get_ib_value(p, idx) & CAYMAN_MSAA_NUM_SAMPLES_MASK;
		track->nsamples = 1 << tmp;
		break;
	case CB_COLOR0_VIEW:
	case CB_COLOR1_VIEW:
	case CB_COLOR2_VIEW:
	case CB_COLOR3_VIEW:
	case CB_COLOR4_VIEW:
	case CB_COLOR5_VIEW:
	case CB_COLOR6_VIEW:
	case CB_COLOR7_VIEW:
		tmp = (reg - CB_COLOR0_VIEW) / 0x3c;
		track->cb_color_view[tmp] = radeon_get_ib_value(p, idx);
		track->cb_dirty = true;
		break;
	case CB_COLOR8_VIEW:
	case CB_COLOR9_VIEW:
	case CB_COLOR10_VIEW:
	case CB_COLOR11_VIEW:
		tmp = ((reg - CB_COLOR8_VIEW) / 0x1c) + 8;
		track->cb_color_view[tmp] = radeon_get_ib_value(p, idx);
		track->cb_dirty = true;
		break;
	case CB_COLOR0_INFO:
	case CB_COLOR1_INFO:
	case CB_COLOR2_INFO:
	case CB_COLOR3_INFO:
	case CB_COLOR4_INFO:
	case CB_COLOR5_INFO:
	case CB_COLOR6_INFO:
	case CB_COLOR7_INFO:
		tmp = (reg - CB_COLOR0_INFO) / 0x3c;
		track->cb_color_info[tmp] = radeon_get_ib_value(p, idx);
		if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				dev_warn(p->dev, "bad SET_CONTEXT_REG "
						"0x%04X\n", reg);
				return -EINVAL;
			}
			ib[idx] |= CB_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
			track->cb_color_info[tmp] |= CB_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
		}
		track->cb_dirty = true;
		break;
	case CB_COLOR8_INFO:
	case CB_COLOR9_INFO:
	case CB_COLOR10_INFO:
	case CB_COLOR11_INFO:
		tmp = ((reg - CB_COLOR8_INFO) / 0x1c) + 8;
		track->cb_color_info[tmp] = radeon_get_ib_value(p, idx);
		if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				dev_warn(p->dev, "bad SET_CONTEXT_REG "
						"0x%04X\n", reg);
				return -EINVAL;
			}
			ib[idx] |= CB_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
			track->cb_color_info[tmp] |= CB_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
		}
		track->cb_dirty = true;
		break;
	case CB_COLOR0_PITCH:
	case CB_COLOR1_PITCH:
	case CB_COLOR2_PITCH:
	case CB_COLOR3_PITCH:
	case CB_COLOR4_PITCH:
	case CB_COLOR5_PITCH:
	case CB_COLOR6_PITCH:
	case CB_COLOR7_PITCH:
		tmp = (reg - CB_COLOR0_PITCH) / 0x3c;
		track->cb_color_pitch[tmp] = radeon_get_ib_value(p, idx);
		track->cb_dirty = true;
		break;
	case CB_COLOR8_PITCH:
	case CB_COLOR9_PITCH:
	case CB_COLOR10_PITCH:
	case CB_COLOR11_PITCH:
		tmp = ((reg - CB_COLOR8_PITCH) / 0x1c) + 8;
		track->cb_color_pitch[tmp] = radeon_get_ib_value(p, idx);
		track->cb_dirty = true;
		break;
	case CB_COLOR0_SLICE:
	case CB_COLOR1_SLICE:
	case CB_COLOR2_SLICE:
	case CB_COLOR3_SLICE:
	case CB_COLOR4_SLICE:
	case CB_COLOR5_SLICE:
	case CB_COLOR6_SLICE:
	case CB_COLOR7_SLICE:
		tmp = (reg - CB_COLOR0_SLICE) / 0x3c;
		track->cb_color_slice[tmp] = radeon_get_ib_value(p, idx);
		track->cb_color_slice_idx[tmp] = idx;
		track->cb_dirty = true;
		break;
	case CB_COLOR8_SLICE:
	case CB_COLOR9_SLICE:
	case CB_COLOR10_SLICE:
	case CB_COLOR11_SLICE:
		tmp = ((reg - CB_COLOR8_SLICE) / 0x1c) + 8;
		track->cb_color_slice[tmp] = radeon_get_ib_value(p, idx);
		track->cb_color_slice_idx[tmp] = idx;
		track->cb_dirty = true;
		break;
	case CB_COLOR0_ATTRIB:
	case CB_COLOR1_ATTRIB:
	case CB_COLOR2_ATTRIB:
	case CB_COLOR3_ATTRIB:
	case CB_COLOR4_ATTRIB:
	case CB_COLOR5_ATTRIB:
	case CB_COLOR6_ATTRIB:
	case CB_COLOR7_ATTRIB:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
			if (reloc->tiling_flags & RADEON_TILING_MACRO) {
				unsigned bankw, bankh, mtaspect, tile_split;

				evergreen_tiling_fields(reloc->tiling_flags,
							&bankw, &bankh, &mtaspect,
							&tile_split);
				ib[idx] |= CB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));
				ib[idx] |= CB_TILE_SPLIT(tile_split) |
					   CB_BANK_WIDTH(bankw) |
					   CB_BANK_HEIGHT(bankh) |
					   CB_MACRO_TILE_ASPECT(mtaspect);
			}
		}
		tmp = ((reg - CB_COLOR0_ATTRIB) / 0x3c);
		track->cb_color_attrib[tmp] = ib[idx];
		track->cb_dirty = true;
		break;
	case CB_COLOR8_ATTRIB:
	case CB_COLOR9_ATTRIB:
	case CB_COLOR10_ATTRIB:
	case CB_COLOR11_ATTRIB:
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			dev_warn(p->dev, "bad SET_CONTEXT_REG "
					"0x%04X\n", reg);
			return -EINVAL;
		}
		if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
			if (reloc->tiling_flags & RADEON_TILING_MACRO) {
				unsigned bankw, bankh, mtaspect, tile_split;

				evergreen_tiling_fields(reloc->tiling_flags,
							&bankw, &bankh, &mtaspect,
							&tile_split);
				ib[idx] |= CB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));
				ib[idx] |= CB_TILE_SPLIT(tile_split) |
					   CB_BANK_WIDTH(bankw) |
					   CB_BANK_HEIGHT(bankh) |
					   CB_MACRO_TILE_ASPECT(mtaspect);
			}
		}
		tmp = ((reg - CB_COLOR8_ATTRIB) / 0x1c) + 8;
		track->cb_color_attrib[tmp] = ib[idx];
		track->cb_dirty = true;
		break;
	case CB_COLOR0_FMASK:
	case CB_COLOR1_FMASK:
	case CB_COLOR2_FMASK:
	case CB_COLOR3_FMASK:
	case CB_COLOR4_FMASK:
	case CB_COLOR5_FMASK:
	case CB_COLOR6_FMASK:
	case CB_COLOR7_FMASK:
		tmp = (reg - CB_COLOR0_FMASK) / 0x3c;
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001500 if (r) {
1501 dev_err(p->dev, "bad SET_CONTEXT_REG 0x%04X\n", reg);
1502 return -EINVAL;
1503 }
Christian Königdf0af442014-03-03 12:38:08 +01001504 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001505 track->cb_color_fmask_bo[tmp] = reloc->robj;
1506 break;
1507 case CB_COLOR0_CMASK:
1508 case CB_COLOR1_CMASK:
1509 case CB_COLOR2_CMASK:
1510 case CB_COLOR3_CMASK:
1511 case CB_COLOR4_CMASK:
1512 case CB_COLOR5_CMASK:
1513 case CB_COLOR6_CMASK:
1514 case CB_COLOR7_CMASK:
1515 tmp = (reg - CB_COLOR0_CMASK) / 0x3c;
Ilija Hadzic012e9762013-01-02 18:27:47 -05001516 r = radeon_cs_packet_next_reloc(p, &reloc, 0);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001517 if (r) {
1518 dev_err(p->dev, "bad SET_CONTEXT_REG 0x%04X\n", reg);
1519 return -EINVAL;
1520 }
Christian Königdf0af442014-03-03 12:38:08 +01001521 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001522 track->cb_color_cmask_bo[tmp] = reloc->robj;
1523 break;
1524 case CB_COLOR0_FMASK_SLICE:
1525 case CB_COLOR1_FMASK_SLICE:
1526 case CB_COLOR2_FMASK_SLICE:
1527 case CB_COLOR3_FMASK_SLICE:
1528 case CB_COLOR4_FMASK_SLICE:
1529 case CB_COLOR5_FMASK_SLICE:
1530 case CB_COLOR6_FMASK_SLICE:
1531 case CB_COLOR7_FMASK_SLICE:
1532 tmp = (reg - CB_COLOR0_FMASK_SLICE) / 0x3c;
1533 track->cb_color_fmask_slice[tmp] = radeon_get_ib_value(p, idx);
1534 break;
1535 case CB_COLOR0_CMASK_SLICE:
1536 case CB_COLOR1_CMASK_SLICE:
1537 case CB_COLOR2_CMASK_SLICE:
1538 case CB_COLOR3_CMASK_SLICE:
1539 case CB_COLOR4_CMASK_SLICE:
1540 case CB_COLOR5_CMASK_SLICE:
1541 case CB_COLOR6_CMASK_SLICE:
1542 case CB_COLOR7_CMASK_SLICE:
1543 tmp = (reg - CB_COLOR0_CMASK_SLICE) / 0x3c;
1544 track->cb_color_cmask_slice[tmp] = radeon_get_ib_value(p, idx);
1545 break;
1546 case CB_COLOR0_BASE:
1547 case CB_COLOR1_BASE:
1548 case CB_COLOR2_BASE:
1549 case CB_COLOR3_BASE:
1550 case CB_COLOR4_BASE:
1551 case CB_COLOR5_BASE:
1552 case CB_COLOR6_BASE:
1553 case CB_COLOR7_BASE:
Ilija Hadzic012e9762013-01-02 18:27:47 -05001554 r = radeon_cs_packet_next_reloc(p, &reloc, 0);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001555 if (r) {
1556 dev_warn(p->dev, "bad SET_CONTEXT_REG "
1557 "0x%04X\n", reg);
1558 return -EINVAL;
1559 }
1560 tmp = (reg - CB_COLOR0_BASE) / 0x3c;
1561 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx);
Christian Königdf0af442014-03-03 12:38:08 +01001562 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001563 track->cb_color_bo[tmp] = reloc->robj;
Marek Olšák30838572012-03-19 03:09:35 +01001564 track->cb_dirty = true;
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001565 break;
1566 case CB_COLOR8_BASE:
1567 case CB_COLOR9_BASE:
1568 case CB_COLOR10_BASE:
1569 case CB_COLOR11_BASE:
Ilija Hadzic012e9762013-01-02 18:27:47 -05001570 r = radeon_cs_packet_next_reloc(p, &reloc, 0);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001571 if (r) {
1572 dev_warn(p->dev, "bad SET_CONTEXT_REG "
1573 "0x%04X\n", reg);
1574 return -EINVAL;
1575 }
1576 tmp = ((reg - CB_COLOR8_BASE) / 0x1c) + 8;
1577 track->cb_color_bo_offset[tmp] = radeon_get_ib_value(p, idx);
Christian Königdf0af442014-03-03 12:38:08 +01001578 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001579 track->cb_color_bo[tmp] = reloc->robj;
Marek Olšák30838572012-03-19 03:09:35 +01001580 track->cb_dirty = true;
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001581 break;
Jerome Glisse88f50c82012-03-21 19:18:21 -04001582 case DB_HTILE_DATA_BASE:
Ilija Hadzic012e9762013-01-02 18:27:47 -05001583 r = radeon_cs_packet_next_reloc(p, &reloc, 0);
Jerome Glisse88f50c82012-03-21 19:18:21 -04001584 if (r) {
1585 dev_warn(p->dev, "bad SET_CONTEXT_REG "
1586 "0x%04X\n", reg);
1587 return -EINVAL;
1588 }
1589 track->htile_offset = radeon_get_ib_value(p, idx);
Christian Königdf0af442014-03-03 12:38:08 +01001590 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
Jerome Glisse88f50c82012-03-21 19:18:21 -04001591 track->htile_bo = reloc->robj;
1592 track->db_dirty = true;
1593 break;
1594 case DB_HTILE_SURFACE:
1595 /* 8x8 only */
1596 track->htile_surface = radeon_get_ib_value(p, idx);
Jerome Glisse4ac05332012-12-13 12:08:11 -05001597 /* force 8x8 htile width and height */
1598 ib[idx] |= 3;
Jerome Glisse88f50c82012-03-21 19:18:21 -04001599 track->db_dirty = true;
1600 break;
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001601 case CB_IMMED0_BASE:
1602 case CB_IMMED1_BASE:
1603 case CB_IMMED2_BASE:
1604 case CB_IMMED3_BASE:
1605 case CB_IMMED4_BASE:
1606 case CB_IMMED5_BASE:
1607 case CB_IMMED6_BASE:
1608 case CB_IMMED7_BASE:
1609 case CB_IMMED8_BASE:
1610 case CB_IMMED9_BASE:
1611 case CB_IMMED10_BASE:
1612 case CB_IMMED11_BASE:
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001613 case SQ_PGM_START_FS:
1614 case SQ_PGM_START_ES:
1615 case SQ_PGM_START_VS:
1616 case SQ_PGM_START_GS:
1617 case SQ_PGM_START_PS:
1618 case SQ_PGM_START_HS:
1619 case SQ_PGM_START_LS:
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001620 case SQ_CONST_MEM_BASE:
1621 case SQ_ALU_CONST_CACHE_GS_0:
1622 case SQ_ALU_CONST_CACHE_GS_1:
1623 case SQ_ALU_CONST_CACHE_GS_2:
1624 case SQ_ALU_CONST_CACHE_GS_3:
1625 case SQ_ALU_CONST_CACHE_GS_4:
1626 case SQ_ALU_CONST_CACHE_GS_5:
1627 case SQ_ALU_CONST_CACHE_GS_6:
1628 case SQ_ALU_CONST_CACHE_GS_7:
1629 case SQ_ALU_CONST_CACHE_GS_8:
1630 case SQ_ALU_CONST_CACHE_GS_9:
1631 case SQ_ALU_CONST_CACHE_GS_10:
1632 case SQ_ALU_CONST_CACHE_GS_11:
1633 case SQ_ALU_CONST_CACHE_GS_12:
1634 case SQ_ALU_CONST_CACHE_GS_13:
1635 case SQ_ALU_CONST_CACHE_GS_14:
1636 case SQ_ALU_CONST_CACHE_GS_15:
1637 case SQ_ALU_CONST_CACHE_PS_0:
1638 case SQ_ALU_CONST_CACHE_PS_1:
1639 case SQ_ALU_CONST_CACHE_PS_2:
1640 case SQ_ALU_CONST_CACHE_PS_3:
1641 case SQ_ALU_CONST_CACHE_PS_4:
1642 case SQ_ALU_CONST_CACHE_PS_5:
1643 case SQ_ALU_CONST_CACHE_PS_6:
1644 case SQ_ALU_CONST_CACHE_PS_7:
1645 case SQ_ALU_CONST_CACHE_PS_8:
1646 case SQ_ALU_CONST_CACHE_PS_9:
1647 case SQ_ALU_CONST_CACHE_PS_10:
1648 case SQ_ALU_CONST_CACHE_PS_11:
1649 case SQ_ALU_CONST_CACHE_PS_12:
1650 case SQ_ALU_CONST_CACHE_PS_13:
1651 case SQ_ALU_CONST_CACHE_PS_14:
1652 case SQ_ALU_CONST_CACHE_PS_15:
1653 case SQ_ALU_CONST_CACHE_VS_0:
1654 case SQ_ALU_CONST_CACHE_VS_1:
1655 case SQ_ALU_CONST_CACHE_VS_2:
1656 case SQ_ALU_CONST_CACHE_VS_3:
1657 case SQ_ALU_CONST_CACHE_VS_4:
1658 case SQ_ALU_CONST_CACHE_VS_5:
1659 case SQ_ALU_CONST_CACHE_VS_6:
1660 case SQ_ALU_CONST_CACHE_VS_7:
1661 case SQ_ALU_CONST_CACHE_VS_8:
1662 case SQ_ALU_CONST_CACHE_VS_9:
1663 case SQ_ALU_CONST_CACHE_VS_10:
1664 case SQ_ALU_CONST_CACHE_VS_11:
1665 case SQ_ALU_CONST_CACHE_VS_12:
1666 case SQ_ALU_CONST_CACHE_VS_13:
1667 case SQ_ALU_CONST_CACHE_VS_14:
1668 case SQ_ALU_CONST_CACHE_VS_15:
1669 case SQ_ALU_CONST_CACHE_HS_0:
1670 case SQ_ALU_CONST_CACHE_HS_1:
1671 case SQ_ALU_CONST_CACHE_HS_2:
1672 case SQ_ALU_CONST_CACHE_HS_3:
1673 case SQ_ALU_CONST_CACHE_HS_4:
1674 case SQ_ALU_CONST_CACHE_HS_5:
1675 case SQ_ALU_CONST_CACHE_HS_6:
1676 case SQ_ALU_CONST_CACHE_HS_7:
1677 case SQ_ALU_CONST_CACHE_HS_8:
1678 case SQ_ALU_CONST_CACHE_HS_9:
1679 case SQ_ALU_CONST_CACHE_HS_10:
1680 case SQ_ALU_CONST_CACHE_HS_11:
1681 case SQ_ALU_CONST_CACHE_HS_12:
1682 case SQ_ALU_CONST_CACHE_HS_13:
1683 case SQ_ALU_CONST_CACHE_HS_14:
1684 case SQ_ALU_CONST_CACHE_HS_15:
1685 case SQ_ALU_CONST_CACHE_LS_0:
1686 case SQ_ALU_CONST_CACHE_LS_1:
1687 case SQ_ALU_CONST_CACHE_LS_2:
1688 case SQ_ALU_CONST_CACHE_LS_3:
1689 case SQ_ALU_CONST_CACHE_LS_4:
1690 case SQ_ALU_CONST_CACHE_LS_5:
1691 case SQ_ALU_CONST_CACHE_LS_6:
1692 case SQ_ALU_CONST_CACHE_LS_7:
1693 case SQ_ALU_CONST_CACHE_LS_8:
1694 case SQ_ALU_CONST_CACHE_LS_9:
1695 case SQ_ALU_CONST_CACHE_LS_10:
1696 case SQ_ALU_CONST_CACHE_LS_11:
1697 case SQ_ALU_CONST_CACHE_LS_12:
1698 case SQ_ALU_CONST_CACHE_LS_13:
1699 case SQ_ALU_CONST_CACHE_LS_14:
1700 case SQ_ALU_CONST_CACHE_LS_15:
Ilija Hadzic012e9762013-01-02 18:27:47 -05001701 r = radeon_cs_packet_next_reloc(p, &reloc, 0);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001702 if (r) {
1703 dev_warn(p->dev, "bad SET_CONTEXT_REG "
1704 "0x%04X\n", reg);
1705 return -EINVAL;
1706 }
Christian Königdf0af442014-03-03 12:38:08 +01001707 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001708 break;
Alex Deucher033b5652011-06-08 15:26:45 -04001709 case SX_MEMORY_EXPORT_BASE:
1710 if (p->rdev->family >= CHIP_CAYMAN) {
1711 dev_warn(p->dev, "bad SET_CONFIG_REG "
1712 "0x%04X\n", reg);
1713 return -EINVAL;
1714 }
Ilija Hadzic012e9762013-01-02 18:27:47 -05001715 r = radeon_cs_packet_next_reloc(p, &reloc, 0);
Alex Deucher033b5652011-06-08 15:26:45 -04001716 if (r) {
1717 dev_warn(p->dev, "bad SET_CONFIG_REG "
1718 "0x%04X\n", reg);
1719 return -EINVAL;
1720 }
Christian Königdf0af442014-03-03 12:38:08 +01001721 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
Alex Deucher033b5652011-06-08 15:26:45 -04001722 break;
1723 case CAYMAN_SX_SCATTER_EXPORT_BASE:
1724 if (p->rdev->family < CHIP_CAYMAN) {
1725 dev_warn(p->dev, "bad SET_CONTEXT_REG "
1726 "0x%04X\n", reg);
1727 return -EINVAL;
1728 }
Ilija Hadzic012e9762013-01-02 18:27:47 -05001729 r = radeon_cs_packet_next_reloc(p, &reloc, 0);
Alex Deucher033b5652011-06-08 15:26:45 -04001730 if (r) {
1731 dev_warn(p->dev, "bad SET_CONTEXT_REG "
1732 "0x%04X\n", reg);
1733 return -EINVAL;
1734 }
Christian Königdf0af442014-03-03 12:38:08 +01001735 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
Alex Deucher033b5652011-06-08 15:26:45 -04001736 break;
Marek Olšák779923b2012-03-08 00:56:00 +01001737 case SX_MISC:
1738 track->sx_misc_kill_all_prims = (radeon_get_ib_value(p, idx) & 0x1) != 0;
1739 break;
Alex Deuchercb5fcbd2010-05-28 19:01:35 -04001740 default:
1741 dev_warn(p->dev, "forbidden register 0x%08x at %d\n", reg, idx);
1742 return -EINVAL;
1743 }
1744 return 0;
1745}
1746
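/*
 * A hedged note on the index arithmetic in evergreen_cs_handle_reg() above:
 * the CB_COLOR0..7 register blocks are spaced 0x3c bytes apart (15 dword
 * registers each, including the CMASK/FMASK/CLEAR_WORD registers), while the
 * CB_COLOR8..11 blocks omit those and are packed at 0x1c bytes (7 registers).
 * Hence the two mappings used throughout:
 *
 *	tmp = (reg - CB_COLOR0_INFO) / 0x3c;		(targets 0-7)
 *	tmp = ((reg - CB_COLOR8_INFO) / 0x1c) + 8;	(targets 8-11)
 *
 * e.g. CB_COLOR9_INFO lands on track index 9 via the second formula.
 */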
/**
 * evergreen_is_safe_reg() - check if register is authorized or not
 * @p: parser structure holding parsing context
 * @reg: register we are testing
 *
 * This function tests the register against reg_safe_bm and returns true
 * if the register is safe or false otherwise.
 */
static inline bool evergreen_is_safe_reg(struct radeon_cs_parser *p, u32 reg)
{
	struct evergreen_cs_track *track = p->track;
	u32 m, i;

	i = (reg >> 7);
	if (unlikely(i >= REG_SAFE_BM_SIZE)) {
		return false;
	}
	m = 1 << ((reg >> 2) & 31);
	if (!(track->reg_safe_bm[i] & m))
		return true;

	return false;
}

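/*
 * Illustrative sketch (hedged, not part of the driver flow): how the bitmap
 * lookup above decomposes a register offset.  Each 32-bit word of reg_safe_bm
 * covers 32 dword registers, i.e. 128 bytes of register space, so reg >> 7
 * selects the word and (reg >> 2) & 31 the bit:
 *
 *	reg = 0x28434:	word = 0x28434 >> 7        = 0x508
 *			bit  = (0x28434 >> 2) & 31 = 13
 *
 * A clear bit means the register may be written unchecked; a set bit (or an
 * out-of-range word) sends the write through evergreen_cs_handle_reg().
 */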
static int evergreen_packet3_check(struct radeon_cs_parser *p,
				   struct radeon_cs_packet *pkt)
{
	struct radeon_bo_list *reloc;
	struct evergreen_cs_track *track;
	volatile u32 *ib;
	unsigned idx;
	unsigned i;
	unsigned start_reg, end_reg, reg;
	int r;
	u32 idx_value;

	track = (struct evergreen_cs_track *)p->track;
	ib = p->ib.ptr;
	idx = pkt->idx + 1;
	idx_value = radeon_get_ib_value(p, idx);

	switch (pkt->opcode) {
	case PACKET3_SET_PREDICATION:
	{
		int pred_op;
		int tmp;
		uint64_t offset;

		if (pkt->count != 1) {
			DRM_ERROR("bad SET PREDICATION\n");
			return -EINVAL;
		}

		tmp = radeon_get_ib_value(p, idx + 1);
		pred_op = (tmp >> 16) & 0x7;

		/* for the clear predicate operation */
		if (pred_op == 0)
			return 0;

		if (pred_op > 2) {
			DRM_ERROR("bad SET PREDICATION operation %d\n", pred_op);
			return -EINVAL;
		}

		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad SET PREDICATION\n");
			return -EINVAL;
		}

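		/*
		 * The predicate buffer address is 40 bits: a 16-byte aligned
		 * low dword in the packet plus address bits [39:32] in the
		 * next dword; both halves are rebased on the reloc'ed BO
		 * below.
		 */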
		offset = reloc->gpu_offset +
			 (idx_value & 0xfffffff0) +
			 ((u64)(tmp & 0xff) << 32);

		ib[idx + 0] = offset;
		ib[idx + 1] = (tmp & 0xffffff00) | (upper_32_bits(offset) & 0xff);
	}
	break;
	case PACKET3_CONTEXT_CONTROL:
		if (pkt->count != 1) {
			DRM_ERROR("bad CONTEXT_CONTROL\n");
			return -EINVAL;
		}
		break;
	case PACKET3_INDEX_TYPE:
	case PACKET3_NUM_INSTANCES:
	case PACKET3_CLEAR_STATE:
		if (pkt->count) {
			DRM_ERROR("bad INDEX_TYPE/NUM_INSTANCES/CLEAR_STATE\n");
			return -EINVAL;
		}
		break;
	case CAYMAN_PACKET3_DEALLOC_STATE:
		if (p->rdev->family < CHIP_CAYMAN) {
			DRM_ERROR("bad PACKET3_DEALLOC_STATE\n");
			return -EINVAL;
		}
		if (pkt->count) {
			DRM_ERROR("bad INDEX_TYPE/NUM_INSTANCES/CLEAR_STATE\n");
			return -EINVAL;
		}
		break;
	case PACKET3_INDEX_BASE:
	{
		uint64_t offset;

		if (pkt->count != 1) {
			DRM_ERROR("bad INDEX_BASE\n");
			return -EINVAL;
		}
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad INDEX_BASE\n");
			return -EINVAL;
		}

		offset = reloc->gpu_offset +
			 idx_value +
			 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32);

		ib[idx+0] = offset;
		ib[idx+1] = upper_32_bits(offset) & 0xff;

		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
			return r;
		}
		break;
	}
	case PACKET3_INDEX_BUFFER_SIZE:
	{
		if (pkt->count != 0) {
			DRM_ERROR("bad INDEX_BUFFER_SIZE\n");
			return -EINVAL;
		}
		break;
	}
	case PACKET3_DRAW_INDEX:
	{
		uint64_t offset;
		if (pkt->count != 3) {
			DRM_ERROR("bad DRAW_INDEX\n");
			return -EINVAL;
		}
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad DRAW_INDEX\n");
			return -EINVAL;
		}

		offset = reloc->gpu_offset +
			 idx_value +
			 ((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32);

		ib[idx+0] = offset;
		ib[idx+1] = upper_32_bits(offset) & 0xff;

		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
			return r;
		}
		break;
	}
	case PACKET3_DRAW_INDEX_2:
	{
		uint64_t offset;

		if (pkt->count != 4) {
			DRM_ERROR("bad DRAW_INDEX_2\n");
			return -EINVAL;
		}
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad DRAW_INDEX_2\n");
			return -EINVAL;
		}

		offset = reloc->gpu_offset +
			 radeon_get_ib_value(p, idx+1) +
			 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32);

		ib[idx+1] = offset;
		ib[idx+2] = upper_32_bits(offset) & 0xff;

		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
			return r;
		}
		break;
	}
	case PACKET3_DRAW_INDEX_AUTO:
		if (pkt->count != 1) {
			DRM_ERROR("bad DRAW_INDEX_AUTO\n");
			return -EINVAL;
		}
		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx);
			return r;
		}
		break;
	case PACKET3_DRAW_INDEX_MULTI_AUTO:
		if (pkt->count != 2) {
			DRM_ERROR("bad DRAW_INDEX_MULTI_AUTO\n");
			return -EINVAL;
		}
		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx);
			return r;
		}
		break;
	case PACKET3_DRAW_INDEX_IMMD:
		if (pkt->count < 2) {
			DRM_ERROR("bad DRAW_INDEX_IMMD\n");
			return -EINVAL;
		}
		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
			return r;
		}
		break;
	case PACKET3_DRAW_INDEX_OFFSET:
		if (pkt->count != 2) {
			DRM_ERROR("bad DRAW_INDEX_OFFSET\n");
			return -EINVAL;
		}
		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
			return r;
		}
		break;
	case PACKET3_DRAW_INDEX_OFFSET_2:
		if (pkt->count != 3) {
			DRM_ERROR("bad DRAW_INDEX_OFFSET_2\n");
			return -EINVAL;
		}
		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
			return r;
		}
		break;
	case PACKET3_SET_BASE:
	{
		/*
		DW 1 HEADER Header of the packet. Shader_Type in bit 1 of the Header will correspond to the shader type of the Load, see Type-3 Packet.
		   2 BASE_INDEX Bits [3:0] BASE_INDEX - Base Index specifies which base address is specified in the last two DWs.
		     0001: DX11 Draw_Index_Indirect Patch Table Base: Base address for Draw_Index_Indirect data.
		   3 ADDRESS_LO Bits [31:3] - Lower bits of QWORD-Aligned Address. Bits [2:0] - Reserved
		   4 ADDRESS_HI Bits [31:8] - Reserved. Bits [7:0] - Upper bits of Address [47:32]
		*/
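		/*
		 * Hedged example of a conforming packet (all values purely
		 * illustrative):
		 *
		 *	PACKET3(PACKET3_SET_BASE, 2)
		 *	0x00000001	BASE_INDEX 1 (indirect draw base)
		 *	0x00100000	ADDRESS_LO (QWORD aligned)
		 *	0x00000000	ADDRESS_HI
		 *
		 * Three payload dwords follow the header, which corresponds
		 * to pkt->count == 2 in the check below.
		 */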
		if (pkt->count != 2) {
			DRM_ERROR("bad SET_BASE\n");
			return -EINVAL;
		}

		/* currently only supporting setting indirect draw buffer base address */
		if (idx_value != 1) {
			DRM_ERROR("bad SET_BASE\n");
			return -EINVAL;
		}

		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad SET_BASE\n");
			return -EINVAL;
		}

		track->indirect_draw_buffer_size = radeon_bo_size(reloc->robj);

		ib[idx+1] = reloc->gpu_offset;
		ib[idx+2] = upper_32_bits(reloc->gpu_offset) & 0xff;

		break;
	}
	case PACKET3_DRAW_INDIRECT:
	case PACKET3_DRAW_INDEX_INDIRECT:
	{
		u64 size = pkt->opcode == PACKET3_DRAW_INDIRECT ? 16 : 20;

		/*
		DW 1 HEADER
		   2 DATA_OFFSET Bits [31:0] + byte aligned offset where the required data structure starts. Bits 1:0 are zero
		   3 DRAW_INITIATOR Draw Initiator Register. Written to the VGT_DRAW_INITIATOR register for the assigned context
		*/
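		/*
		 * DATA_OFFSET (idx_value here) is a byte offset into the
		 * buffer programmed by a prior SET_BASE, so it is validated
		 * below against track->indirect_draw_buffer_size rather than
		 * relocated.
		 */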
		if (pkt->count != 1) {
			DRM_ERROR("bad DRAW_INDIRECT\n");
			return -EINVAL;
		}

		if (idx_value + size > track->indirect_draw_buffer_size) {
			dev_warn(p->dev, "DRAW_INDIRECT buffer too small %u + %llu > %lu\n",
				idx_value, size, track->indirect_draw_buffer_size);
			return -EINVAL;
		}

		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
			return r;
		}
		break;
	}
	case PACKET3_DISPATCH_DIRECT:
		if (pkt->count != 3) {
			DRM_ERROR("bad DISPATCH_DIRECT\n");
			return -EINVAL;
		}
		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream %d\n", __func__, __LINE__, idx);
			return r;
		}
		break;
	case PACKET3_DISPATCH_INDIRECT:
		if (pkt->count != 1) {
			DRM_ERROR("bad DISPATCH_INDIRECT\n");
			return -EINVAL;
		}
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad DISPATCH_INDIRECT\n");
			return -EINVAL;
		}
		ib[idx+0] = idx_value + (u32)(reloc->gpu_offset & 0xffffffff);
		r = evergreen_cs_track_check(p);
		if (r) {
			dev_warn(p->dev, "%s:%d invalid cmd stream\n", __func__, __LINE__);
			return r;
		}
		break;
	case PACKET3_WAIT_REG_MEM:
		if (pkt->count != 5) {
			DRM_ERROR("bad WAIT_REG_MEM\n");
			return -EINVAL;
		}
		/* bit 4 is reg (0) or mem (1) */
		if (idx_value & 0x10) {
			uint64_t offset;

			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				DRM_ERROR("bad WAIT_REG_MEM\n");
				return -EINVAL;
			}

			offset = reloc->gpu_offset +
				 (radeon_get_ib_value(p, idx+1) & 0xfffffffc) +
				 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32);

			ib[idx+1] = (ib[idx+1] & 0x3) | (offset & 0xfffffffc);
			ib[idx+2] = upper_32_bits(offset) & 0xff;
		} else if (idx_value & 0x100) {
			DRM_ERROR("cannot use PFP on REG wait\n");
			return -EINVAL;
		}
		break;
	case PACKET3_CP_DMA:
	{
		u32 command, size, info;
		u64 offset, tmp;
		if (pkt->count != 4) {
			DRM_ERROR("bad CP DMA\n");
			return -EINVAL;
		}
		command = radeon_get_ib_value(p, idx+4);
		size = command & 0x1fffff;
		info = radeon_get_ib_value(p, idx+1);
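		/*
		 * INFO dword layout as decoded below: bits [30:29] select the
		 * source address space and bits [21:20] the destination; 0
		 * means linear memory (and therefore needs a reloc), non-zero
		 * encodings are GDS/DATA and are cross-checked against the
		 * SAS/DAS bits of the COMMAND dword.
		 */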
		if ((((info & 0x60000000) >> 29) != 0) || /* src = GDS or DATA */
		    (((info & 0x00300000) >> 20) != 0) || /* dst = GDS */
		    ((((info & 0x00300000) >> 20) == 0) &&
		     (command & PACKET3_CP_DMA_CMD_DAS)) || /* dst = register */
		    ((((info & 0x60000000) >> 29) == 0) &&
		     (command & PACKET3_CP_DMA_CMD_SAS))) { /* src = register */
			/* non mem to mem copies require dw aligned count */
			if (size % 4) {
				DRM_ERROR("CP DMA command requires dw count alignment\n");
				return -EINVAL;
			}
		}
		if (command & PACKET3_CP_DMA_CMD_SAS) {
			/* src address space is register */
			/* GDS is ok */
			if (((info & 0x60000000) >> 29) != 1) {
				DRM_ERROR("CP DMA SAS not supported\n");
				return -EINVAL;
			}
		} else {
			if (command & PACKET3_CP_DMA_CMD_SAIC) {
				DRM_ERROR("CP DMA SAIC only supported for registers\n");
				return -EINVAL;
			}
			/* src address space is memory */
			if (((info & 0x60000000) >> 29) == 0) {
				r = radeon_cs_packet_next_reloc(p, &reloc, 0);
				if (r) {
					DRM_ERROR("bad CP DMA SRC\n");
					return -EINVAL;
				}

				tmp = radeon_get_ib_value(p, idx) +
					((u64)(radeon_get_ib_value(p, idx+1) & 0xff) << 32);

				offset = reloc->gpu_offset + tmp;

				if ((tmp + size) > radeon_bo_size(reloc->robj)) {
					dev_warn(p->dev, "CP DMA src buffer too small (%llu %lu)\n",
						 tmp + size, radeon_bo_size(reloc->robj));
					return -EINVAL;
				}

				ib[idx] = offset;
				ib[idx+1] = (ib[idx+1] & 0xffffff00) | (upper_32_bits(offset) & 0xff);
			} else if (((info & 0x60000000) >> 29) != 2) {
				DRM_ERROR("bad CP DMA SRC_SEL\n");
				return -EINVAL;
			}
		}
		if (command & PACKET3_CP_DMA_CMD_DAS) {
			/* dst address space is register */
			/* GDS is ok */
			if (((info & 0x00300000) >> 20) != 1) {
				DRM_ERROR("CP DMA DAS not supported\n");
				return -EINVAL;
			}
		} else {
			/* dst address space is memory */
			if (command & PACKET3_CP_DMA_CMD_DAIC) {
				DRM_ERROR("CP DMA DAIC only supported for registers\n");
				return -EINVAL;
			}
			if (((info & 0x00300000) >> 20) == 0) {
				r = radeon_cs_packet_next_reloc(p, &reloc, 0);
				if (r) {
					DRM_ERROR("bad CP DMA DST\n");
					return -EINVAL;
				}

				tmp = radeon_get_ib_value(p, idx+2) +
					((u64)(radeon_get_ib_value(p, idx+3) & 0xff) << 32);

				offset = reloc->gpu_offset + tmp;

				if ((tmp + size) > radeon_bo_size(reloc->robj)) {
					dev_warn(p->dev, "CP DMA dst buffer too small (%llu %lu)\n",
						 tmp + size, radeon_bo_size(reloc->robj));
					return -EINVAL;
				}

				ib[idx+2] = offset;
				ib[idx+3] = upper_32_bits(offset) & 0xff;
			} else {
				DRM_ERROR("bad CP DMA DST_SEL\n");
				return -EINVAL;
			}
		}
		break;
	}
	case PACKET3_SURFACE_SYNC:
		if (pkt->count != 3) {
			DRM_ERROR("bad SURFACE_SYNC\n");
			return -EINVAL;
		}
		/* 0xffffffff/0x0 is flush all cache flag */
		if (radeon_get_ib_value(p, idx + 1) != 0xffffffff ||
		    radeon_get_ib_value(p, idx + 2) != 0) {
			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				DRM_ERROR("bad SURFACE_SYNC\n");
				return -EINVAL;
			}
			ib[idx+2] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
		}
		break;
	case PACKET3_EVENT_WRITE:
		if (pkt->count != 2 && pkt->count != 0) {
			DRM_ERROR("bad EVENT_WRITE\n");
			return -EINVAL;
		}
		if (pkt->count) {
			uint64_t offset;

			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				DRM_ERROR("bad EVENT_WRITE\n");
				return -EINVAL;
			}
			offset = reloc->gpu_offset +
				 (radeon_get_ib_value(p, idx+1) & 0xfffffff8) +
				 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32);

			ib[idx+1] = offset & 0xfffffff8;
			ib[idx+2] = upper_32_bits(offset) & 0xff;
		}
		break;
	case PACKET3_EVENT_WRITE_EOP:
	{
		uint64_t offset;

		if (pkt->count != 4) {
			DRM_ERROR("bad EVENT_WRITE_EOP\n");
			return -EINVAL;
		}
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad EVENT_WRITE_EOP\n");
			return -EINVAL;
		}

		offset = reloc->gpu_offset +
			 (radeon_get_ib_value(p, idx+1) & 0xfffffffc) +
			 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32);

		ib[idx+1] = offset & 0xfffffffc;
		ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff);
		break;
	}
	case PACKET3_EVENT_WRITE_EOS:
	{
		uint64_t offset;

		if (pkt->count != 3) {
			DRM_ERROR("bad EVENT_WRITE_EOS\n");
			return -EINVAL;
		}
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad EVENT_WRITE_EOS\n");
			return -EINVAL;
		}

		offset = reloc->gpu_offset +
			 (radeon_get_ib_value(p, idx+1) & 0xfffffffc) +
			 ((u64)(radeon_get_ib_value(p, idx+2) & 0xff) << 32);

		ib[idx+1] = offset & 0xfffffffc;
		ib[idx+2] = (ib[idx+2] & 0xffffff00) | (upper_32_bits(offset) & 0xff);
		break;
	}
	case PACKET3_SET_CONFIG_REG:
		start_reg = (idx_value << 2) + PACKET3_SET_CONFIG_REG_START;
		end_reg = 4 * pkt->count + start_reg - 4;
		if ((start_reg < PACKET3_SET_CONFIG_REG_START) ||
		    (start_reg >= PACKET3_SET_CONFIG_REG_END) ||
		    (end_reg >= PACKET3_SET_CONFIG_REG_END)) {
			DRM_ERROR("bad PACKET3_SET_CONFIG_REG\n");
			return -EINVAL;
		}
		for (i = 0; i < pkt->count; i++) {
			reg = start_reg + (4 * i);
			if (evergreen_is_safe_reg(p, reg))
				continue;
			r = evergreen_cs_handle_reg(p, reg, idx + 1 + i);
			if (r)
				return r;
		}
		break;
	case PACKET3_SET_CONTEXT_REG:
		start_reg = (idx_value << 2) + PACKET3_SET_CONTEXT_REG_START;
		end_reg = 4 * pkt->count + start_reg - 4;
		if ((start_reg < PACKET3_SET_CONTEXT_REG_START) ||
		    (start_reg >= PACKET3_SET_CONTEXT_REG_END) ||
		    (end_reg >= PACKET3_SET_CONTEXT_REG_END)) {
			DRM_ERROR("bad PACKET3_SET_CONTEXT_REG\n");
			return -EINVAL;
		}
		for (i = 0; i < pkt->count; i++) {
			reg = start_reg + (4 * i);
			if (evergreen_is_safe_reg(p, reg))
				continue;
			r = evergreen_cs_handle_reg(p, reg, idx + 1 + i);
			if (r)
				return r;
		}
		break;
	case PACKET3_SET_RESOURCE:
		if (pkt->count % 8) {
			DRM_ERROR("bad SET_RESOURCE\n");
			return -EINVAL;
		}
		start_reg = (idx_value << 2) + PACKET3_SET_RESOURCE_START;
		end_reg = 4 * pkt->count + start_reg - 4;
		if ((start_reg < PACKET3_SET_RESOURCE_START) ||
		    (start_reg >= PACKET3_SET_RESOURCE_END) ||
		    (end_reg >= PACKET3_SET_RESOURCE_END)) {
			DRM_ERROR("bad SET_RESOURCE\n");
			return -EINVAL;
		}
		for (i = 0; i < (pkt->count / 8); i++) {
			struct radeon_bo *texture, *mipmap;
			u32 toffset, moffset;
			u32 size, offset, mip_address, tex_dim;

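			/*
			 * Each SET_RESOURCE slot is eight dwords.  For a
			 * texture resource the parser patches dword 2 (base)
			 * and dword 3 (mip/FMASK base) with relocated
			 * addresses, which is why everything below indexes
			 * ib[idx+1+(i*8)+n].
			 */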
			switch (G__SQ_CONSTANT_TYPE(radeon_get_ib_value(p, idx+1+(i*8)+7))) {
			case SQ_TEX_VTX_VALID_TEXTURE:
				/* tex base */
				r = radeon_cs_packet_next_reloc(p, &reloc, 0);
				if (r) {
					DRM_ERROR("bad SET_RESOURCE (tex)\n");
					return -EINVAL;
				}
				if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) {
					ib[idx+1+(i*8)+1] |=
						TEX_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags));
					if (reloc->tiling_flags & RADEON_TILING_MACRO) {
						unsigned bankw, bankh, mtaspect, tile_split;

						evergreen_tiling_fields(reloc->tiling_flags,
									&bankw, &bankh, &mtaspect,
									&tile_split);
						ib[idx+1+(i*8)+6] |= TEX_TILE_SPLIT(tile_split);
						ib[idx+1+(i*8)+7] |=
							TEX_BANK_WIDTH(bankw) |
							TEX_BANK_HEIGHT(bankh) |
							MACRO_TILE_ASPECT(mtaspect) |
							TEX_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks));
					}
				}
				texture = reloc->robj;
				toffset = (u32)((reloc->gpu_offset >> 8) & 0xffffffff);

				/* tex mip base */
				tex_dim = ib[idx+1+(i*8)+0] & 0x7;
				mip_address = ib[idx+1+(i*8)+3];

				if ((tex_dim == SQ_TEX_DIM_2D_MSAA || tex_dim == SQ_TEX_DIM_2D_ARRAY_MSAA) &&
				    !mip_address &&
				    !radeon_cs_packet_next_is_pkt3_nop(p)) {
					/* MIP_ADDRESS should point to FMASK for an MSAA texture.
					 * It should be 0 if FMASK is disabled. */
					moffset = 0;
					mipmap = NULL;
				} else {
					r = radeon_cs_packet_next_reloc(p, &reloc, 0);
					if (r) {
						DRM_ERROR("bad SET_RESOURCE (tex)\n");
						return -EINVAL;
					}
					moffset = (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
					mipmap = reloc->robj;
				}

				r = evergreen_cs_track_validate_texture(p, texture, mipmap, idx+1+(i*8));
				if (r)
					return r;
				ib[idx+1+(i*8)+2] += toffset;
				ib[idx+1+(i*8)+3] += moffset;
				break;
			case SQ_TEX_VTX_VALID_BUFFER:
			{
				uint64_t offset64;
				/* vtx base */
				r = radeon_cs_packet_next_reloc(p, &reloc, 0);
				if (r) {
					DRM_ERROR("bad SET_RESOURCE (vtx)\n");
					return -EINVAL;
				}
				offset = radeon_get_ib_value(p, idx+1+(i*8)+0);
				size = radeon_get_ib_value(p, idx+1+(i*8)+1);
				if (p->rdev && (size + offset) > radeon_bo_size(reloc->robj)) {
					/* force size to size of the buffer */
					dev_warn(p->dev, "vbo resource seems too big for the bo\n");
					ib[idx+1+(i*8)+1] = radeon_bo_size(reloc->robj) - offset;
				}

				offset64 = reloc->gpu_offset + offset;
				ib[idx+1+(i*8)+0] = offset64;
				ib[idx+1+(i*8)+2] = (ib[idx+1+(i*8)+2] & 0xffffff00) |
						    (upper_32_bits(offset64) & 0xff);
				break;
			}
			case SQ_TEX_VTX_INVALID_TEXTURE:
			case SQ_TEX_VTX_INVALID_BUFFER:
			default:
				DRM_ERROR("bad SET_RESOURCE\n");
				return -EINVAL;
			}
		}
		break;
	case PACKET3_SET_ALU_CONST:
		/* XXX fix me ALU const buffers only */
		break;
	case PACKET3_SET_BOOL_CONST:
		start_reg = (idx_value << 2) + PACKET3_SET_BOOL_CONST_START;
		end_reg = 4 * pkt->count + start_reg - 4;
		if ((start_reg < PACKET3_SET_BOOL_CONST_START) ||
		    (start_reg >= PACKET3_SET_BOOL_CONST_END) ||
		    (end_reg >= PACKET3_SET_BOOL_CONST_END)) {
			DRM_ERROR("bad SET_BOOL_CONST\n");
			return -EINVAL;
		}
		break;
	case PACKET3_SET_LOOP_CONST:
		start_reg = (idx_value << 2) + PACKET3_SET_LOOP_CONST_START;
		end_reg = 4 * pkt->count + start_reg - 4;
		if ((start_reg < PACKET3_SET_LOOP_CONST_START) ||
		    (start_reg >= PACKET3_SET_LOOP_CONST_END) ||
		    (end_reg >= PACKET3_SET_LOOP_CONST_END)) {
			DRM_ERROR("bad SET_LOOP_CONST\n");
			return -EINVAL;
		}
		break;
	case PACKET3_SET_CTL_CONST:
		start_reg = (idx_value << 2) + PACKET3_SET_CTL_CONST_START;
		end_reg = 4 * pkt->count + start_reg - 4;
		if ((start_reg < PACKET3_SET_CTL_CONST_START) ||
		    (start_reg >= PACKET3_SET_CTL_CONST_END) ||
		    (end_reg >= PACKET3_SET_CTL_CONST_END)) {
			DRM_ERROR("bad SET_CTL_CONST\n");
			return -EINVAL;
		}
		break;
	case PACKET3_SET_SAMPLER:
		if (pkt->count % 3) {
			DRM_ERROR("bad SET_SAMPLER\n");
			return -EINVAL;
		}
		start_reg = (idx_value << 2) + PACKET3_SET_SAMPLER_START;
		end_reg = 4 * pkt->count + start_reg - 4;
		if ((start_reg < PACKET3_SET_SAMPLER_START) ||
		    (start_reg >= PACKET3_SET_SAMPLER_END) ||
		    (end_reg >= PACKET3_SET_SAMPLER_END)) {
			DRM_ERROR("bad SET_SAMPLER\n");
			return -EINVAL;
		}
		break;
	case PACKET3_STRMOUT_BUFFER_UPDATE:
		if (pkt->count != 4) {
			DRM_ERROR("bad STRMOUT_BUFFER_UPDATE (invalid count)\n");
			return -EINVAL;
		}
		/* Updating memory at DST_ADDRESS. */
		if (idx_value & 0x1) {
			u64 offset;
			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				DRM_ERROR("bad STRMOUT_BUFFER_UPDATE (missing dst reloc)\n");
				return -EINVAL;
			}
			offset = radeon_get_ib_value(p, idx+1);
			offset += ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32;
			if ((offset + 4) > radeon_bo_size(reloc->robj)) {
				DRM_ERROR("bad STRMOUT_BUFFER_UPDATE dst bo too small: 0x%llx, 0x%lx\n",
					  offset + 4, radeon_bo_size(reloc->robj));
				return -EINVAL;
			}
			offset += reloc->gpu_offset;
			ib[idx+1] = offset;
			ib[idx+2] = upper_32_bits(offset) & 0xff;
		}
		/* Reading data from SRC_ADDRESS. */
		if (((idx_value >> 1) & 0x3) == 2) {
			u64 offset;
			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				DRM_ERROR("bad STRMOUT_BUFFER_UPDATE (missing src reloc)\n");
				return -EINVAL;
			}
			offset = radeon_get_ib_value(p, idx+3);
			offset += ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32;
			if ((offset + 4) > radeon_bo_size(reloc->robj)) {
				DRM_ERROR("bad STRMOUT_BUFFER_UPDATE src bo too small: 0x%llx, 0x%lx\n",
					  offset + 4, radeon_bo_size(reloc->robj));
				return -EINVAL;
			}
			offset += reloc->gpu_offset;
			ib[idx+3] = offset;
			ib[idx+4] = upper_32_bits(offset) & 0xff;
		}
		break;
	case PACKET3_MEM_WRITE:
	{
		u64 offset;

		if (pkt->count != 3) {
			DRM_ERROR("bad MEM_WRITE (invalid count)\n");
			return -EINVAL;
		}
		r = radeon_cs_packet_next_reloc(p, &reloc, 0);
		if (r) {
			DRM_ERROR("bad MEM_WRITE (missing reloc)\n");
			return -EINVAL;
		}
		offset = radeon_get_ib_value(p, idx+0);
		offset += ((u64)(radeon_get_ib_value(p, idx+1) & 0xff)) << 32UL;
		if (offset & 0x7) {
			DRM_ERROR("bad MEM_WRITE (address not qwords aligned)\n");
			return -EINVAL;
		}
		if ((offset + 8) > radeon_bo_size(reloc->robj)) {
			DRM_ERROR("bad MEM_WRITE bo too small: 0x%llx, 0x%lx\n",
				  offset + 8, radeon_bo_size(reloc->robj));
			return -EINVAL;
		}
		offset += reloc->gpu_offset;
		ib[idx+0] = offset;
		ib[idx+1] = upper_32_bits(offset) & 0xff;
		break;
	}
	case PACKET3_COPY_DW:
		if (pkt->count != 4) {
			DRM_ERROR("bad COPY_DW (invalid count)\n");
			return -EINVAL;
		}
		if (idx_value & 0x1) {
			u64 offset;
			/* SRC is memory. */
			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				DRM_ERROR("bad COPY_DW (missing src reloc)\n");
				return -EINVAL;
			}
			offset = radeon_get_ib_value(p, idx+1);
			offset += ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32;
			if ((offset + 4) > radeon_bo_size(reloc->robj)) {
				DRM_ERROR("bad COPY_DW src bo too small: 0x%llx, 0x%lx\n",
					  offset + 4, radeon_bo_size(reloc->robj));
				return -EINVAL;
			}
			offset += reloc->gpu_offset;
			ib[idx+1] = offset;
			ib[idx+2] = upper_32_bits(offset) & 0xff;
		} else {
			/* SRC is a reg. */
			reg = radeon_get_ib_value(p, idx+1) << 2;
			if (!evergreen_is_safe_reg(p, reg)) {
				dev_warn(p->dev, "forbidden register 0x%08x at %d\n",
					 reg, idx + 1);
				return -EINVAL;
			}
		}
		if (idx_value & 0x2) {
			u64 offset;
			/* DST is memory. */
			r = radeon_cs_packet_next_reloc(p, &reloc, 0);
			if (r) {
				DRM_ERROR("bad COPY_DW (missing dst reloc)\n");
				return -EINVAL;
			}
			offset = radeon_get_ib_value(p, idx+3);
			offset += ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32;
			if ((offset + 4) > radeon_bo_size(reloc->robj)) {
				DRM_ERROR("bad COPY_DW dst bo too small: 0x%llx, 0x%lx\n",
					  offset + 4, radeon_bo_size(reloc->robj));
				return -EINVAL;
			}
			offset += reloc->gpu_offset;
			ib[idx+3] = offset;
			ib[idx+4] = upper_32_bits(offset) & 0xff;
		} else {
			/* DST is a reg. */
			reg = radeon_get_ib_value(p, idx+3) << 2;
			if (!evergreen_is_safe_reg(p, reg)) {
				dev_warn(p->dev, "forbidden register 0x%08x at %d\n",
					 reg, idx + 3);
				return -EINVAL;
			}
		}
		break;
	case PACKET3_NOP:
		break;
	default:
		DRM_ERROR("Packet3 opcode %x not supported\n", pkt->opcode);
		return -EINVAL;
	}
	return 0;
}

int evergreen_cs_parse(struct radeon_cs_parser *p)
{
	struct radeon_cs_packet pkt;
	struct evergreen_cs_track *track;
	u32 tmp;
	int r;

	if (p->track == NULL) {
		/* initialize tracker, we are in kms */
		track = kzalloc(sizeof(*track), GFP_KERNEL);
		if (track == NULL)
			return -ENOMEM;
		evergreen_cs_track_init(track);
		if (p->rdev->family >= CHIP_CAYMAN) {
			tmp = p->rdev->config.cayman.tile_config;
			track->reg_safe_bm = cayman_reg_safe_bm;
		} else {
			tmp = p->rdev->config.evergreen.tile_config;
			track->reg_safe_bm = evergreen_reg_safe_bm;
		}
		BUILD_BUG_ON(ARRAY_SIZE(cayman_reg_safe_bm) != REG_SAFE_BM_SIZE);
		BUILD_BUG_ON(ARRAY_SIZE(evergreen_reg_safe_bm) != REG_SAFE_BM_SIZE);
		switch (tmp & 0xf) {
		case 0:
			track->npipes = 1;
			break;
		case 1:
		default:
			track->npipes = 2;
			break;
		case 2:
			track->npipes = 4;
			break;
		case 3:
			track->npipes = 8;
			break;
		}

		switch ((tmp & 0xf0) >> 4) {
		case 0:
			track->nbanks = 4;
			break;
		case 1:
		default:
			track->nbanks = 8;
			break;
		case 2:
			track->nbanks = 16;
			break;
		}

		switch ((tmp & 0xf00) >> 8) {
		case 0:
			track->group_size = 256;
			break;
		case 1:
		default:
			track->group_size = 512;
			break;
		}

		switch ((tmp & 0xf000) >> 12) {
		case 0:
			track->row_size = 1;
			break;
		case 1:
		default:
			track->row_size = 2;
			break;
		case 2:
			track->row_size = 4;
			break;
		}

		p->track = track;
	}
	do {
		r = radeon_cs_packet_parse(p, &pkt, p->idx);
		if (r) {
			kfree(p->track);
			p->track = NULL;
			return r;
		}
		p->idx += pkt.count + 2;
		switch (pkt.type) {
		case RADEON_PACKET_TYPE0:
			r = evergreen_cs_parse_packet0(p, &pkt);
			break;
		case RADEON_PACKET_TYPE2:
			break;
		case RADEON_PACKET_TYPE3:
			r = evergreen_packet3_check(p, &pkt);
			break;
		default:
			DRM_ERROR("Unknown packet type %d !\n", pkt.type);
			kfree(p->track);
			p->track = NULL;
			return -EINVAL;
		}
		if (r) {
			kfree(p->track);
			p->track = NULL;
			return r;
		}
	} while (p->idx < p->chunk_ib->length_dw);
#if 0
	for (r = 0; r < p->ib.length_dw; r++) {
		printk(KERN_INFO "%05d 0x%08X\n", r, p->ib.ptr[r]);
		mdelay(1);
	}
#endif
	kfree(p->track);
	p->track = NULL;
	return 0;
}

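/*
 * A hedged worked example of the tile_config decode in evergreen_cs_parse()
 * above: a value of 0x0121 would yield npipes = 2 (bits 3:0 == 1),
 * nbanks = 16 (bits 7:4 == 2), group_size = 512 (bits 11:8 == 1) and
 * row_size = 1 (bits 15:12 == 0).
 */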
/**
 * evergreen_dma_cs_parse() - parse the DMA IB
 * @p: parser structure holding parsing context.
 *
 * Parses the DMA IB from the CS ioctl and updates
 * the GPU addresses based on the reloc information and
 * checks for errors. (Evergreen-Cayman)
 * Returns 0 for success and an error on failure.
 */
int evergreen_dma_cs_parse(struct radeon_cs_parser *p)
{
	struct radeon_cs_chunk *ib_chunk = p->chunk_ib;
	struct radeon_bo_list *src_reloc, *dst_reloc, *dst2_reloc;
	u32 header, cmd, count, sub_cmd;
	volatile u32 *ib = p->ib.ptr;
	u32 idx;
	u64 src_offset, dst_offset, dst2_offset;
	int r;

	do {
		if (p->idx >= ib_chunk->length_dw) {
			DRM_ERROR("Can not parse packet at %d after CS end %d !\n",
				  p->idx, ib_chunk->length_dw);
			return -EINVAL;
		}
		idx = p->idx;
		header = radeon_get_ib_value(p, idx);
		cmd = GET_DMA_CMD(header);
		count = GET_DMA_COUNT(header);
		sub_cmd = GET_DMA_SUB_CMD(header);
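		/*
		 * Assuming the GET_DMA_* field layout from evergreend.h (cmd
		 * in header bits [31:28], sub-command in [27:20], count in
		 * [19:0]), a header of 0x40800010 would decode as cmd 4
		 * (DMA_PACKET_COPY), sub_cmd 0x08 (L2T/T2L copy) and a count
		 * of 16 dwords.
		 */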
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002768
2769 switch (cmd) {
2770 case DMA_PACKET_WRITE:
2771 r = r600_dma_cs_next_reloc(p, &dst_reloc);
2772 if (r) {
2773 DRM_ERROR("bad DMA_PACKET_WRITE\n");
2774 return -EINVAL;
2775 }
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002776 switch (sub_cmd) {
2777 /* tiled */
2778 case 8:
Jerome Glissede0babd2013-02-11 08:57:18 -05002779 dst_offset = radeon_get_ib_value(p, idx+1);
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002780 dst_offset <<= 8;
2781
Christian Königdf0af442014-03-03 12:38:08 +01002782 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002783 p->idx += count + 7;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002784 break;
2785 /* linear */
2786 case 0:
Jerome Glissede0babd2013-02-11 08:57:18 -05002787 dst_offset = radeon_get_ib_value(p, idx+1);
2788 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+2) & 0xff)) << 32;
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002789
Christian Königdf0af442014-03-03 12:38:08 +01002790 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
2791 ib[idx+2] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002792 p->idx += count + 3;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002793 break;
2794 default:
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002795 DRM_ERROR("bad DMA_PACKET_WRITE [%6d] 0x%08x sub cmd is not 0 or 8\n", idx, header);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002796 return -EINVAL;
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002797 }
2798 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
2799 dev_warn(p->dev, "DMA write buffer too small (%llu %lu)\n",
2800 dst_offset, radeon_bo_size(dst_reloc->robj));
2801 return -EINVAL;
2802 }
2803 break;
2804 case DMA_PACKET_COPY:
2805 r = r600_dma_cs_next_reloc(p, &src_reloc);
2806 if (r) {
2807 DRM_ERROR("bad DMA_PACKET_COPY\n");
2808 return -EINVAL;
2809 }
2810 r = r600_dma_cs_next_reloc(p, &dst_reloc);
2811 if (r) {
2812 DRM_ERROR("bad DMA_PACKET_COPY\n");
2813 return -EINVAL;
2814 }
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002815 switch (sub_cmd) {
2816 /* Copy L2L, DW aligned */
2817 case 0x00:
2818 /* L2L, dw */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002819 src_offset = radeon_get_ib_value(p, idx+2);
2820 src_offset |= ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32;
2821 dst_offset = radeon_get_ib_value(p, idx+1);
2822 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff)) << 32;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002823 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
2824 dev_warn(p->dev, "DMA L2L, dw src buffer too small (%llu %lu)\n",
2825 src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
2826 return -EINVAL;
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002827 }
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002828 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
2829 dev_warn(p->dev, "DMA L2L, dw dst buffer too small (%llu %lu)\n",
2830 dst_offset + (count * 4), radeon_bo_size(dst_reloc->robj));
2831 return -EINVAL;
2832 }
Christian Königdf0af442014-03-03 12:38:08 +01002833 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
2834 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
2835 ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
2836 ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002837 p->idx += 5;
2838 break;
2839 /* Copy L2T/T2L */
2840 case 0x08:
2841 /* detile bit */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002842 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) {
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002843 /* tiled src, linear dst */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002844 src_offset = radeon_get_ib_value(p, idx+1);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002845 src_offset <<= 8;
Christian Königdf0af442014-03-03 12:38:08 +01002846 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002847
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002848 dst_offset = radeon_get_ib_value(p, idx + 7);
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002849 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+8) & 0xff)) << 32;
Christian Königdf0af442014-03-03 12:38:08 +01002850 ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
2851 ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002852 } else {
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002853 /* linear src, tiled dst */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002854 src_offset = radeon_get_ib_value(p, idx+7);
2855 src_offset |= ((u64)(radeon_get_ib_value(p, idx+8) & 0xff)) << 32;
Christian Königdf0af442014-03-03 12:38:08 +01002856 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
2857 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002858
Jerome Glissede0babd2013-02-11 08:57:18 -05002859 dst_offset = radeon_get_ib_value(p, idx+1);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002860 dst_offset <<= 8;
Christian Königdf0af442014-03-03 12:38:08 +01002861 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
Alex Deucherd2ead3e2012-12-13 09:55:45 -05002862 }
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002863 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
2864 dev_warn(p->dev, "DMA L2T, src buffer too small (%llu %lu)\n",
2865 src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
2866 return -EINVAL;
2867 }
2868 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
2869 dev_warn(p->dev, "DMA L2T, dst buffer too small (%llu %lu)\n",
2870 dst_offset + (count * 4), radeon_bo_size(dst_reloc->robj));
2871 return -EINVAL;
2872 }
2873 p->idx += 9;
2874 break;
2875 /* Copy L2L, byte aligned */
2876 case 0x40:
2877 /* L2L, byte */
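			/*
			 * Unlike sub cmd 0x00, the byte-aligned variant
			 * treats "count" as bytes (the bounds checks use
			 * count, not count * 4) and does not force dword
			 * alignment on the relocated addresses.
			 */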
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002878 src_offset = radeon_get_ib_value(p, idx+2);
2879 src_offset |= ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32;
2880 dst_offset = radeon_get_ib_value(p, idx+1);
2881 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0xff)) << 32;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002882 if ((src_offset + count) > radeon_bo_size(src_reloc->robj)) {
2883 dev_warn(p->dev, "DMA L2L, byte src buffer too small (%llu %lu)\n",
2884 src_offset + count, radeon_bo_size(src_reloc->robj));
2885 return -EINVAL;
2886 }
2887 if ((dst_offset + count) > radeon_bo_size(dst_reloc->robj)) {
2888 dev_warn(p->dev, "DMA L2L, byte dst buffer too small (%llu %lu)\n",
2889 dst_offset + count, radeon_bo_size(dst_reloc->robj));
2890 return -EINVAL;
2891 }
Christian Königdf0af442014-03-03 12:38:08 +01002892 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xffffffff);
2893 ib[idx+2] += (u32)(src_reloc->gpu_offset & 0xffffffff);
2894 ib[idx+3] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
2895 ib[idx+4] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002896 p->idx += 5;
2897 break;
2898 /* Copy L2L, partial */
2899 case 0x41:
2900 /* L2L, partial */
2901 if (p->family < CHIP_CAYMAN) {
2902				DRM_ERROR("L2L Partial is cayman only!\n");
2903 return -EINVAL;
2904 }
Christian Königdf0af442014-03-03 12:38:08 +01002905 ib[idx+1] += (u32)(src_reloc->gpu_offset & 0xffffffff);
2906 ib[idx+2] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
2907 ib[idx+4] += (u32)(dst_reloc->gpu_offset & 0xffffffff);
2908 ib[idx+5] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002909
2910 p->idx += 9;
2911 break;
2912 /* Copy L2L, DW aligned, broadcast */
2913 case 0x44:
2914 /* L2L, dw, broadcast */
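			/*
			 * Broadcast copies write the same source data to two
			 * destinations, so a second reloc (dst2) is consumed
			 * and both destination addresses are patched.
			 */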
2915 r = r600_dma_cs_next_reloc(p, &dst2_reloc);
2916 if (r) {
2917 DRM_ERROR("bad L2L, dw, broadcast DMA_PACKET_COPY\n");
2918 return -EINVAL;
2919 }
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002920 dst_offset = radeon_get_ib_value(p, idx+1);
2921 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+4) & 0xff)) << 32;
2922 dst2_offset = radeon_get_ib_value(p, idx+2);
2923 dst2_offset |= ((u64)(radeon_get_ib_value(p, idx+5) & 0xff)) << 32;
2924 src_offset = radeon_get_ib_value(p, idx+3);
2925 src_offset |= ((u64)(radeon_get_ib_value(p, idx+6) & 0xff)) << 32;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002926 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
2927 dev_warn(p->dev, "DMA L2L, dw, broadcast src buffer too small (%llu %lu)\n",
2928 src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
2929 return -EINVAL;
2930 }
2931 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
2932 dev_warn(p->dev, "DMA L2L, dw, broadcast dst buffer too small (%llu %lu)\n",
2933 dst_offset + (count * 4), radeon_bo_size(dst_reloc->robj));
2934 return -EINVAL;
2935 }
2936 if ((dst2_offset + (count * 4)) > radeon_bo_size(dst2_reloc->robj)) {
2937 dev_warn(p->dev, "DMA L2L, dw, broadcast dst2 buffer too small (%llu %lu)\n",
2938 dst2_offset + (count * 4), radeon_bo_size(dst2_reloc->robj));
2939 return -EINVAL;
2940 }
Christian Königdf0af442014-03-03 12:38:08 +01002941 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
2942 ib[idx+2] += (u32)(dst2_reloc->gpu_offset & 0xfffffffc);
2943 ib[idx+3] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
2944 ib[idx+4] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
2945 ib[idx+5] += upper_32_bits(dst2_reloc->gpu_offset) & 0xff;
2946 ib[idx+6] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002947 p->idx += 7;
2948 break;
2949 /* Copy L2T Frame to Field */
2950 case 0x48:
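			/*
			 * Frame-to-field: one linear source is split across
			 * two tiled destinations (presumably the two
			 * interlaced fields), so the detile bit must be
			 * clear and a second destination reloc is consumed.
			 */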
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002951 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) {
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002952 DRM_ERROR("bad L2T, frame to fields DMA_PACKET_COPY\n");
2953 return -EINVAL;
2954 }
2955 r = r600_dma_cs_next_reloc(p, &dst2_reloc);
2956 if (r) {
2957 DRM_ERROR("bad L2T, frame to fields DMA_PACKET_COPY\n");
2958 return -EINVAL;
2959 }
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002960 dst_offset = radeon_get_ib_value(p, idx+1);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002961 dst_offset <<= 8;
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002962 dst2_offset = radeon_get_ib_value(p, idx+2);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002963 dst2_offset <<= 8;
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002964 src_offset = radeon_get_ib_value(p, idx+8);
2965 src_offset |= ((u64)(radeon_get_ib_value(p, idx+9) & 0xff)) << 32;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002966 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
2967 dev_warn(p->dev, "DMA L2T, frame to fields src buffer too small (%llu %lu)\n",
2968 src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
2969 return -EINVAL;
2970 }
2971 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
2972				dev_warn(p->dev, "DMA L2T, frame to fields dst buffer too small (%llu %lu)\n",
2973 dst_offset + (count * 4), radeon_bo_size(dst_reloc->robj));
2974 return -EINVAL;
2975 }
2976 if ((dst2_offset + (count * 4)) > radeon_bo_size(dst2_reloc->robj)) {
2977				dev_warn(p->dev, "DMA L2T, frame to fields dst2 buffer too small (%llu %lu)\n",
2978 dst2_offset + (count * 4), radeon_bo_size(dst2_reloc->robj));
2979 return -EINVAL;
2980 }
Christian Königdf0af442014-03-03 12:38:08 +01002981 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
2982 ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8);
2983 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
2984 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002985 p->idx += 10;
2986 break;
2987 /* Copy L2T/T2L, partial */
2988 case 0x49:
2989 /* L2T, T2L partial */
2990 if (p->family < CHIP_CAYMAN) {
2991				DRM_ERROR("L2T, T2L Partial is cayman only!\n");
2992 return -EINVAL;
2993 }
2994 /* detile bit */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08002995 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) {
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002996 /* tiled src, linear dst */
Christian Königdf0af442014-03-03 12:38:08 +01002997 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05002998
Christian Königdf0af442014-03-03 12:38:08 +01002999 ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
3000 ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003001 } else {
3002 /* linear src, tiled dst */
Christian Königdf0af442014-03-03 12:38:08 +01003003 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
3004 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003005
Christian Königdf0af442014-03-03 12:38:08 +01003006 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003007 }
3008 p->idx += 12;
3009 break;
3010 /* Copy L2T broadcast */
3011 case 0x4b:
3012 /* L2T, broadcast */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003013 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) {
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003014 DRM_ERROR("bad L2T, broadcast DMA_PACKET_COPY\n");
3015 return -EINVAL;
3016 }
3017 r = r600_dma_cs_next_reloc(p, &dst2_reloc);
3018 if (r) {
3019 DRM_ERROR("bad L2T, broadcast DMA_PACKET_COPY\n");
3020 return -EINVAL;
3021 }
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003022 dst_offset = radeon_get_ib_value(p, idx+1);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003023 dst_offset <<= 8;
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003024 dst2_offset = radeon_get_ib_value(p, idx+2);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003025 dst2_offset <<= 8;
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003026 src_offset = radeon_get_ib_value(p, idx+8);
3027 src_offset |= ((u64)(radeon_get_ib_value(p, idx+9) & 0xff)) << 32;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003028 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3029 dev_warn(p->dev, "DMA L2T, broadcast src buffer too small (%llu %lu)\n",
3030 src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3031 return -EINVAL;
3032 }
3033 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
3034 dev_warn(p->dev, "DMA L2T, broadcast dst buffer too small (%llu %lu)\n",
3035 dst_offset + (count * 4), radeon_bo_size(dst_reloc->robj));
3036 return -EINVAL;
3037 }
3038 if ((dst2_offset + (count * 4)) > radeon_bo_size(dst2_reloc->robj)) {
3039 dev_warn(p->dev, "DMA L2T, broadcast dst2 buffer too small (%llu %lu)\n",
3040 dst2_offset + (count * 4), radeon_bo_size(dst2_reloc->robj));
3041 return -EINVAL;
3042 }
Christian Königdf0af442014-03-03 12:38:08 +01003043 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
3044 ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8);
3045 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
3046 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003047 p->idx += 10;
3048 break;
3049 /* Copy L2T/T2L (tile units) */
3050 case 0x4c:
3051 /* L2T, T2L */
3052 /* detile bit */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003053 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) {
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003054 /* tiled src, linear dst */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003055 src_offset = radeon_get_ib_value(p, idx+1);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003056 src_offset <<= 8;
Christian Königdf0af442014-03-03 12:38:08 +01003057 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003058
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003059 dst_offset = radeon_get_ib_value(p, idx+7);
3060 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+8) & 0xff)) << 32;
Christian Königdf0af442014-03-03 12:38:08 +01003061 ib[idx+7] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
3062 ib[idx+8] += upper_32_bits(dst_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003063 } else {
3064 /* linear src, tiled dst */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003065 src_offset = radeon_get_ib_value(p, idx+7);
3066 src_offset |= ((u64)(radeon_get_ib_value(p, idx+8) & 0xff)) << 32;
Christian Königdf0af442014-03-03 12:38:08 +01003067 ib[idx+7] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
3068 ib[idx+8] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003069
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003070 dst_offset = radeon_get_ib_value(p, idx+1);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003071 dst_offset <<= 8;
Christian Königdf0af442014-03-03 12:38:08 +01003072 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003073 }
3074 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3075 dev_warn(p->dev, "DMA L2T, T2L src buffer too small (%llu %lu)\n",
3076 src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3077 return -EINVAL;
3078 }
3079 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
3080 dev_warn(p->dev, "DMA L2T, T2L dst buffer too small (%llu %lu)\n",
3081 dst_offset + (count * 4), radeon_bo_size(dst_reloc->robj));
3082 return -EINVAL;
3083 }
3084 p->idx += 9;
3085 break;
3086 /* Copy T2T, partial (tile units) */
3087 case 0x4d:
3088 /* T2T partial */
3089 if (p->family < CHIP_CAYMAN) {
3090				DRM_ERROR("T2T Partial is cayman only!\n");
3091 return -EINVAL;
3092 }
Christian Königdf0af442014-03-03 12:38:08 +01003093 ib[idx+1] += (u32)(src_reloc->gpu_offset >> 8);
3094 ib[idx+4] += (u32)(dst_reloc->gpu_offset >> 8);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003095 p->idx += 13;
3096 break;
3097 /* Copy L2T broadcast (tile units) */
3098 case 0x4f:
3099 /* L2T, broadcast */
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003100 if (radeon_get_ib_value(p, idx + 2) & (1 << 31)) {
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003101 DRM_ERROR("bad L2T, broadcast DMA_PACKET_COPY\n");
3102 return -EINVAL;
3103 }
3104 r = r600_dma_cs_next_reloc(p, &dst2_reloc);
3105 if (r) {
3106 DRM_ERROR("bad L2T, broadcast DMA_PACKET_COPY\n");
3107 return -EINVAL;
3108 }
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003109 dst_offset = radeon_get_ib_value(p, idx+1);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003110 dst_offset <<= 8;
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003111 dst2_offset = radeon_get_ib_value(p, idx+2);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003112 dst2_offset <<= 8;
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003113 src_offset = radeon_get_ib_value(p, idx+8);
3114 src_offset |= ((u64)(radeon_get_ib_value(p, idx+9) & 0xff)) << 32;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003115 if ((src_offset + (count * 4)) > radeon_bo_size(src_reloc->robj)) {
3116 dev_warn(p->dev, "DMA L2T, broadcast src buffer too small (%llu %lu)\n",
3117 src_offset + (count * 4), radeon_bo_size(src_reloc->robj));
3118 return -EINVAL;
3119 }
3120 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
3121 dev_warn(p->dev, "DMA L2T, broadcast dst buffer too small (%llu %lu)\n",
3122 dst_offset + (count * 4), radeon_bo_size(dst_reloc->robj));
3123 return -EINVAL;
3124 }
3125 if ((dst2_offset + (count * 4)) > radeon_bo_size(dst2_reloc->robj)) {
3126 dev_warn(p->dev, "DMA L2T, broadcast dst2 buffer too small (%llu %lu)\n",
3127 dst2_offset + (count * 4), radeon_bo_size(dst2_reloc->robj));
3128 return -EINVAL;
3129 }
Christian Königdf0af442014-03-03 12:38:08 +01003130 ib[idx+1] += (u32)(dst_reloc->gpu_offset >> 8);
3131 ib[idx+2] += (u32)(dst2_reloc->gpu_offset >> 8);
3132 ib[idx+8] += (u32)(src_reloc->gpu_offset & 0xfffffffc);
3133 ib[idx+9] += upper_32_bits(src_reloc->gpu_offset) & 0xff;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003134 p->idx += 10;
3135 break;
3136 default:
Linus Torvaldsfffddfd2013-02-25 16:46:44 -08003137 DRM_ERROR("bad DMA_PACKET_COPY [%6d] 0x%08x invalid sub cmd\n", idx, header);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003138 return -EINVAL;
Alex Deucherd2ead3e2012-12-13 09:55:45 -05003139 }
3140 break;
3141 case DMA_PACKET_CONSTANT_FILL:
3142 r = r600_dma_cs_next_reloc(p, &dst_reloc);
3143 if (r) {
3144 DRM_ERROR("bad DMA_PACKET_CONSTANT_FILL\n");
3145 return -EINVAL;
3146 }
Jerome Glissede0babd2013-02-11 08:57:18 -05003147 dst_offset = radeon_get_ib_value(p, idx+1);
3148 dst_offset |= ((u64)(radeon_get_ib_value(p, idx+3) & 0x00ff0000)) << 16;
Alex Deucherd2ead3e2012-12-13 09:55:45 -05003149 if ((dst_offset + (count * 4)) > radeon_bo_size(dst_reloc->robj)) {
3150 dev_warn(p->dev, "DMA constant fill buffer too small (%llu %lu)\n",
3151			 dst_offset + (count * 4), radeon_bo_size(dst_reloc->robj));
3152 return -EINVAL;
3153 }
Christian Königdf0af442014-03-03 12:38:08 +01003154 ib[idx+1] += (u32)(dst_reloc->gpu_offset & 0xfffffffc);
3155 ib[idx+3] += (upper_32_bits(dst_reloc->gpu_offset) << 16) & 0x00ff0000;
Alex Deucherd2ead3e2012-12-13 09:55:45 -05003156 p->idx += 4;
3157 break;
3158 case DMA_PACKET_NOP:
3159 p->idx += 1;
3160 break;
3161 default:
3162			DRM_ERROR("Unknown packet type %d at %d!\n", cmd, idx);
3163 return -EINVAL;
3164 }
Christian König6d2d13d2014-12-03 15:53:24 +01003165 } while (p->idx < p->chunk_ib->length_dw);
Alex Deucherd2ead3e2012-12-13 09:55:45 -05003166#if 0
3167	for (r = 0; r < p->ib.length_dw; r++) {
3168 printk(KERN_INFO "%05d 0x%08X\n", r, p->ib.ptr[r]);
3169 mdelay(1);
3170 }
3171#endif
3172 return 0;
3173}
3174
Jerome Glisse721604a2012-01-05 22:11:05 -05003175/* vm parser */
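/*
 * With VM enabled, user IBs run in a per-process GPU address space, so
 * there are no relocations to patch and no buffer bounds to check; the
 * checks below only reject writes to registers outside the allowed set
 * and unknown packet opcodes.
 */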
3176static bool evergreen_vm_reg_valid(u32 reg)
3177{
3178 /* context regs are fine */
3179 if (reg >= 0x28000)
3180 return true;
3181
3182 /* check config regs */
3183 switch (reg) {
Alex Deucher668bbc82012-12-20 21:19:32 -05003184 case WAIT_UNTIL:
Jerome Glisse721604a2012-01-05 22:11:05 -05003185 case GRBM_GFX_INDEX:
Alex Deucher860fe2f2012-11-08 10:08:04 -05003186 case CP_STRMOUT_CNTL:
3187 case CP_COHER_CNTL:
3188 case CP_COHER_SIZE:
Jerome Glisse721604a2012-01-05 22:11:05 -05003189 case VGT_VTX_VECT_EJECT_REG:
3190 case VGT_CACHE_INVALIDATION:
3191 case VGT_GS_VERTEX_REUSE:
3192 case VGT_PRIMITIVE_TYPE:
3193 case VGT_INDEX_TYPE:
3194 case VGT_NUM_INDICES:
3195 case VGT_NUM_INSTANCES:
3196 case VGT_COMPUTE_DIM_X:
3197 case VGT_COMPUTE_DIM_Y:
3198 case VGT_COMPUTE_DIM_Z:
3199 case VGT_COMPUTE_START_X:
3200 case VGT_COMPUTE_START_Y:
3201 case VGT_COMPUTE_START_Z:
3202 case VGT_COMPUTE_INDEX:
3203 case VGT_COMPUTE_THREAD_GROUP_SIZE:
3204 case VGT_HS_OFFCHIP_PARAM:
3205 case PA_CL_ENHANCE:
3206 case PA_SU_LINE_STIPPLE_VALUE:
3207 case PA_SC_LINE_STIPPLE_STATE:
3208 case PA_SC_ENHANCE:
3209 case SQ_DYN_GPR_CNTL_PS_FLUSH_REQ:
3210 case SQ_DYN_GPR_SIMD_LOCK_EN:
3211 case SQ_CONFIG:
3212 case SQ_GPR_RESOURCE_MGMT_1:
3213 case SQ_GLOBAL_GPR_RESOURCE_MGMT_1:
3214 case SQ_GLOBAL_GPR_RESOURCE_MGMT_2:
3215 case SQ_CONST_MEM_BASE:
3216 case SQ_STATIC_THREAD_MGMT_1:
3217 case SQ_STATIC_THREAD_MGMT_2:
3218 case SQ_STATIC_THREAD_MGMT_3:
3219 case SPI_CONFIG_CNTL:
3220 case SPI_CONFIG_CNTL_1:
3221 case TA_CNTL_AUX:
3222 case DB_DEBUG:
3223 case DB_DEBUG2:
3224 case DB_DEBUG3:
3225 case DB_DEBUG4:
3226 case DB_WATERMARKS:
3227 case TD_PS_BORDER_COLOR_INDEX:
3228 case TD_PS_BORDER_COLOR_RED:
3229 case TD_PS_BORDER_COLOR_GREEN:
3230 case TD_PS_BORDER_COLOR_BLUE:
3231 case TD_PS_BORDER_COLOR_ALPHA:
3232 case TD_VS_BORDER_COLOR_INDEX:
3233 case TD_VS_BORDER_COLOR_RED:
3234 case TD_VS_BORDER_COLOR_GREEN:
3235 case TD_VS_BORDER_COLOR_BLUE:
3236 case TD_VS_BORDER_COLOR_ALPHA:
3237 case TD_GS_BORDER_COLOR_INDEX:
3238 case TD_GS_BORDER_COLOR_RED:
3239 case TD_GS_BORDER_COLOR_GREEN:
3240 case TD_GS_BORDER_COLOR_BLUE:
3241 case TD_GS_BORDER_COLOR_ALPHA:
3242 case TD_HS_BORDER_COLOR_INDEX:
3243 case TD_HS_BORDER_COLOR_RED:
3244 case TD_HS_BORDER_COLOR_GREEN:
3245 case TD_HS_BORDER_COLOR_BLUE:
3246 case TD_HS_BORDER_COLOR_ALPHA:
3247 case TD_LS_BORDER_COLOR_INDEX:
3248 case TD_LS_BORDER_COLOR_RED:
3249 case TD_LS_BORDER_COLOR_GREEN:
3250 case TD_LS_BORDER_COLOR_BLUE:
3251 case TD_LS_BORDER_COLOR_ALPHA:
3252 case TD_CS_BORDER_COLOR_INDEX:
3253 case TD_CS_BORDER_COLOR_RED:
3254 case TD_CS_BORDER_COLOR_GREEN:
3255 case TD_CS_BORDER_COLOR_BLUE:
3256 case TD_CS_BORDER_COLOR_ALPHA:
3257 case SQ_ESGS_RING_SIZE:
3258 case SQ_GSVS_RING_SIZE:
3259 case SQ_ESTMP_RING_SIZE:
3260 case SQ_GSTMP_RING_SIZE:
3261 case SQ_HSTMP_RING_SIZE:
3262 case SQ_LSTMP_RING_SIZE:
3263 case SQ_PSTMP_RING_SIZE:
3264 case SQ_VSTMP_RING_SIZE:
3265 case SQ_ESGS_RING_ITEMSIZE:
3266 case SQ_ESTMP_RING_ITEMSIZE:
3267 case SQ_GSTMP_RING_ITEMSIZE:
3268 case SQ_GSVS_RING_ITEMSIZE:
3269 case SQ_GS_VERT_ITEMSIZE:
3270 case SQ_GS_VERT_ITEMSIZE_1:
3271 case SQ_GS_VERT_ITEMSIZE_2:
3272 case SQ_GS_VERT_ITEMSIZE_3:
3273 case SQ_GSVS_RING_OFFSET_1:
3274 case SQ_GSVS_RING_OFFSET_2:
3275 case SQ_GSVS_RING_OFFSET_3:
3276 case SQ_HSTMP_RING_ITEMSIZE:
3277 case SQ_LSTMP_RING_ITEMSIZE:
3278 case SQ_PSTMP_RING_ITEMSIZE:
3279 case SQ_VSTMP_RING_ITEMSIZE:
3280 case VGT_TF_RING_SIZE:
3281 case SQ_ESGS_RING_BASE:
3282 case SQ_GSVS_RING_BASE:
3283 case SQ_ESTMP_RING_BASE:
3284 case SQ_GSTMP_RING_BASE:
3285 case SQ_HSTMP_RING_BASE:
3286 case SQ_LSTMP_RING_BASE:
3287 case SQ_PSTMP_RING_BASE:
3288 case SQ_VSTMP_RING_BASE:
3289 case CAYMAN_VGT_OFFCHIP_LDS_BASE:
3290 case CAYMAN_SQ_EX_ALLOC_TABLE_SLOTS:
3291 return true;
3292 default:
Alex Deucherc7172132012-10-19 13:27:04 -04003293 DRM_ERROR("Invalid register 0x%x in CS\n", reg);
Jerome Glisse721604a2012-01-05 22:11:05 -05003294 return false;
3295 }
3296}
3297
3298static int evergreen_vm_packet3_check(struct radeon_device *rdev,
3299 u32 *ib, struct radeon_cs_packet *pkt)
3300{
3301 u32 idx = pkt->idx + 1;
3302 u32 idx_value = ib[idx];
3303 u32 start_reg, end_reg, reg, i;
Alex Deucher94e014e2012-12-03 19:32:54 -05003304 u32 command, info;
Jerome Glisse721604a2012-01-05 22:11:05 -05003305
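	/*
	 * idx points at the first dword after the PACKET3 header and
	 * idx_value holds its contents.  Register arguments in the payload
	 * are dword offsets, hence the "<< 2" / "* 4" conversions before
	 * the evergreen_vm_reg_valid() checks.
	 */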
3306 switch (pkt->opcode) {
3307 case PACKET3_NOP:
Glenn Kennard16613742014-12-13 03:32:37 +01003308 break;
Jerome Glisse721604a2012-01-05 22:11:05 -05003309 case PACKET3_SET_BASE:
Glenn Kennard16613742014-12-13 03:32:37 +01003310 if (idx_value != 1) {
3311			DRM_ERROR("bad SET_BASE\n");
3312 return -EINVAL;
3313 }
3314 break;
Jerome Glisse721604a2012-01-05 22:11:05 -05003315 case PACKET3_CLEAR_STATE:
3316 case PACKET3_INDEX_BUFFER_SIZE:
3317 case PACKET3_DISPATCH_DIRECT:
3318 case PACKET3_DISPATCH_INDIRECT:
3319 case PACKET3_MODE_CONTROL:
3320 case PACKET3_SET_PREDICATION:
3321 case PACKET3_COND_EXEC:
3322 case PACKET3_PRED_EXEC:
3323 case PACKET3_DRAW_INDIRECT:
3324 case PACKET3_DRAW_INDEX_INDIRECT:
3325 case PACKET3_INDEX_BASE:
3326 case PACKET3_DRAW_INDEX_2:
3327 case PACKET3_CONTEXT_CONTROL:
3328 case PACKET3_DRAW_INDEX_OFFSET:
3329 case PACKET3_INDEX_TYPE:
3330 case PACKET3_DRAW_INDEX:
3331 case PACKET3_DRAW_INDEX_AUTO:
3332 case PACKET3_DRAW_INDEX_IMMD:
3333 case PACKET3_NUM_INSTANCES:
3334 case PACKET3_DRAW_INDEX_MULTI_AUTO:
3335 case PACKET3_STRMOUT_BUFFER_UPDATE:
3336 case PACKET3_DRAW_INDEX_OFFSET_2:
3337 case PACKET3_DRAW_INDEX_MULTI_ELEMENT:
3338 case PACKET3_MPEG_INDEX:
3339 case PACKET3_WAIT_REG_MEM:
3340 case PACKET3_MEM_WRITE:
3341 case PACKET3_SURFACE_SYNC:
3342 case PACKET3_EVENT_WRITE:
3343 case PACKET3_EVENT_WRITE_EOP:
3344 case PACKET3_EVENT_WRITE_EOS:
3345 case PACKET3_SET_CONTEXT_REG:
3346 case PACKET3_SET_BOOL_CONST:
3347 case PACKET3_SET_LOOP_CONST:
3348 case PACKET3_SET_RESOURCE:
3349 case PACKET3_SET_SAMPLER:
3350 case PACKET3_SET_CTL_CONST:
3351 case PACKET3_SET_RESOURCE_OFFSET:
3352 case PACKET3_SET_CONTEXT_REG_INDIRECT:
3353 case PACKET3_SET_RESOURCE_INDIRECT:
3354 case CAYMAN_PACKET3_DEALLOC_STATE:
3355 break;
3356 case PACKET3_COND_WRITE:
3357 if (idx_value & 0x100) {
3358 reg = ib[idx + 5] * 4;
3359 if (!evergreen_vm_reg_valid(reg))
3360 return -EINVAL;
3361 }
3362 break;
3363 case PACKET3_COPY_DW:
3364 if (idx_value & 0x2) {
3365 reg = ib[idx + 3] * 4;
3366 if (!evergreen_vm_reg_valid(reg))
3367 return -EINVAL;
3368 }
3369 break;
3370 case PACKET3_SET_CONFIG_REG:
3371 start_reg = (idx_value << 2) + PACKET3_SET_CONFIG_REG_START;
3372 end_reg = 4 * pkt->count + start_reg - 4;
3373 if ((start_reg < PACKET3_SET_CONFIG_REG_START) ||
3374 (start_reg >= PACKET3_SET_CONFIG_REG_END) ||
3375 (end_reg >= PACKET3_SET_CONFIG_REG_END)) {
3376 DRM_ERROR("bad PACKET3_SET_CONFIG_REG\n");
3377 return -EINVAL;
3378 }
3379 for (i = 0; i < pkt->count; i++) {
3380 reg = start_reg + (4 * i);
3381 if (!evergreen_vm_reg_valid(reg))
3382 return -EINVAL;
3383 }
3384 break;
Alex Deucher94e014e2012-12-03 19:32:54 -05003385 case PACKET3_CP_DMA:
3386 command = ib[idx + 4];
3387 info = ib[idx + 1];
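		/*
		 * Decode used below: info bits 30:29 select the source
		 * address space and bits 21:20 the destination address space
		 * (0 = memory); command bits 20:0 carry the transfer count,
		 * SAS/DAS mark the source/destination as register rather
		 * than memory addresses, and SAIC/DAIC suppress the address
		 * increment so a single register is hit repeatedly.
		 */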
Alex Deucher9d89d782012-12-14 00:23:06 -05003388 if ((((info & 0x60000000) >> 29) != 0) || /* src = GDS or DATA */
3389 (((info & 0x00300000) >> 20) != 0) || /* dst = GDS */
3390 ((((info & 0x00300000) >> 20) == 0) &&
3391 (command & PACKET3_CP_DMA_CMD_DAS)) || /* dst = register */
3392 ((((info & 0x60000000) >> 29) == 0) &&
3393 (command & PACKET3_CP_DMA_CMD_SAS))) { /* src = register */
3394 /* non mem to mem copies requires dw aligned count */
3395 if ((command & 0x1fffff) % 4) {
3396 DRM_ERROR("CP DMA command requires dw count alignment\n");
3397 return -EINVAL;
3398 }
3399 }
Alex Deucher94e014e2012-12-03 19:32:54 -05003400 if (command & PACKET3_CP_DMA_CMD_SAS) {
3401 /* src address space is register */
3402 if (((info & 0x60000000) >> 29) == 0) {
3403 start_reg = idx_value << 2;
3404 if (command & PACKET3_CP_DMA_CMD_SAIC) {
3405 reg = start_reg;
3406 if (!evergreen_vm_reg_valid(reg)) {
3407 DRM_ERROR("CP DMA Bad SRC register\n");
3408 return -EINVAL;
3409 }
3410 } else {
3411 for (i = 0; i < (command & 0x1fffff); i++) {
3412 reg = start_reg + (4 * i);
3413 if (!evergreen_vm_reg_valid(reg)) {
3414 DRM_ERROR("CP DMA Bad SRC register\n");
3415 return -EINVAL;
3416 }
3417 }
3418 }
3419 }
3420 }
3421 if (command & PACKET3_CP_DMA_CMD_DAS) {
3422 /* dst address space is register */
3423 if (((info & 0x00300000) >> 20) == 0) {
3424 start_reg = ib[idx + 2];
3425 if (command & PACKET3_CP_DMA_CMD_DAIC) {
3426 reg = start_reg;
3427 if (!evergreen_vm_reg_valid(reg)) {
3428 DRM_ERROR("CP DMA Bad DST register\n");
3429 return -EINVAL;
3430 }
3431 } else {
3432 for (i = 0; i < (command & 0x1fffff); i++) {
3433 reg = start_reg + (4 * i);
3434 if (!evergreen_vm_reg_valid(reg)) {
3435 DRM_ERROR("CP DMA Bad DST register\n");
3436 return -EINVAL;
3437 }
3438 }
3439 }
3440 }
3441 }
3442 break;
Jerome Glisse721604a2012-01-05 22:11:05 -05003443 default:
3444 return -EINVAL;
3445 }
3446 return 0;
3447}
3448
3449int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
3450{
3451 int ret = 0;
3452 u32 idx = 0;
3453 struct radeon_cs_packet pkt;
3454
3455 do {
3456 pkt.idx = idx;
Ilija Hadzic4e872ae2013-01-02 18:27:48 -05003457 pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
3458 pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
Jerome Glisse721604a2012-01-05 22:11:05 -05003459 pkt.one_reg_wr = 0;
3460 switch (pkt.type) {
Ilija Hadzic4e872ae2013-01-02 18:27:48 -05003461 case RADEON_PACKET_TYPE0:
Jerome Glisse721604a2012-01-05 22:11:05 -05003462 dev_err(rdev->dev, "Packet0 not allowed!\n");
3463 ret = -EINVAL;
3464 break;
Ilija Hadzic4e872ae2013-01-02 18:27:48 -05003465 case RADEON_PACKET_TYPE2:
Alex Deucher0b41da62012-01-12 15:42:37 -05003466 idx += 1;
Jerome Glisse721604a2012-01-05 22:11:05 -05003467 break;
Ilija Hadzic4e872ae2013-01-02 18:27:48 -05003468 case RADEON_PACKET_TYPE3:
3469 pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
Jerome Glisse721604a2012-01-05 22:11:05 -05003470 ret = evergreen_vm_packet3_check(rdev, ib->ptr, &pkt);
Alex Deucher0b41da62012-01-12 15:42:37 -05003471 idx += pkt.count + 2;
Jerome Glisse721604a2012-01-05 22:11:05 -05003472 break;
3473 default:
3474			dev_err(rdev->dev, "Unknown packet type %d!\n", pkt.type);
3475 ret = -EINVAL;
3476 break;
3477 }
3478 if (ret)
3479 break;
Jerome Glisse721604a2012-01-05 22:11:05 -05003480 } while (idx < ib->length_dw);
3481
3482 return ret;
3483}
Alex Deuchercd459e52012-12-13 12:17:38 -05003484
3485/**
3486 * evergreen_dma_ib_parse() - parse the DMA IB for VM
3487 * @rdev: radeon_device pointer
3488 * @ib: radeon_ib pointer
3489 *
3490 * Parses the DMA IB from the VM CS ioctl
3491 * and checks for errors. (Cayman-SI)
3492 * Returns 0 for success and an error on failure.
3493 **/
3494int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
3495{
3496 u32 idx = 0;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003497 u32 header, cmd, count, sub_cmd;
Alex Deuchercd459e52012-12-13 12:17:38 -05003498
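	/*
	 * VM counterpart of the DMA CS parser above: no addresses to patch
	 * or bounds-check, so each packet is validated by type and sub-type
	 * only and skipped by its fixed dword length.
	 */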
3499 do {
3500 header = ib->ptr[idx];
3501 cmd = GET_DMA_CMD(header);
3502 count = GET_DMA_COUNT(header);
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003503 sub_cmd = GET_DMA_SUB_CMD(header);
Alex Deuchercd459e52012-12-13 12:17:38 -05003504
3505 switch (cmd) {
3506 case DMA_PACKET_WRITE:
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003507 switch (sub_cmd) {
3508 /* tiled */
3509 case 8:
Alex Deuchercd459e52012-12-13 12:17:38 -05003510 idx += count + 7;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003511 break;
3512 /* linear */
3513 case 0:
Alex Deuchercd459e52012-12-13 12:17:38 -05003514 idx += count + 3;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003515 break;
3516 default:
3517 DRM_ERROR("bad DMA_PACKET_WRITE [%6d] 0x%08x sub cmd is not 0 or 8\n", idx, ib->ptr[idx]);
3518 return -EINVAL;
3519 }
Alex Deuchercd459e52012-12-13 12:17:38 -05003520 break;
3521 case DMA_PACKET_COPY:
Jerome Glisse0fcb6152013-01-14 11:32:27 -05003522 switch (sub_cmd) {
3523 /* Copy L2L, DW aligned */
3524 case 0x00:
3525 idx += 5;
3526 break;
3527 /* Copy L2T/T2L */
3528 case 0x08:
3529 idx += 9;
3530 break;
3531 /* Copy L2L, byte aligned */
3532 case 0x40:
3533 idx += 5;
3534 break;
3535 /* Copy L2L, partial */
3536 case 0x41:
3537 idx += 9;
3538 break;
3539 /* Copy L2L, DW aligned, broadcast */
3540 case 0x44:
3541 idx += 7;
3542 break;
3543 /* Copy L2T Frame to Field */
3544 case 0x48:
3545 idx += 10;
3546 break;
3547 /* Copy L2T/T2L, partial */
3548 case 0x49:
3549 idx += 12;
3550 break;
3551 /* Copy L2T broadcast */
3552 case 0x4b:
3553 idx += 10;
3554 break;
3555 /* Copy L2T/T2L (tile units) */
3556 case 0x4c:
3557 idx += 9;
3558 break;
3559 /* Copy T2T, partial (tile units) */
3560 case 0x4d:
3561 idx += 13;
3562 break;
3563 /* Copy L2T broadcast (tile units) */
3564 case 0x4f:
3565 idx += 10;
3566 break;
3567 default:
3568 DRM_ERROR("bad DMA_PACKET_COPY [%6d] 0x%08x invalid sub cmd\n", idx, ib->ptr[idx]);
3569 return -EINVAL;
Alex Deuchercd459e52012-12-13 12:17:38 -05003570 }
3571 break;
3572 case DMA_PACKET_CONSTANT_FILL:
3573 idx += 4;
3574 break;
3575 case DMA_PACKET_NOP:
3576 idx += 1;
3577 break;
3578 default:
3579			DRM_ERROR("Unknown packet type %d at %d!\n", cmd, idx);
3580 return -EINVAL;
3581 }
3582 } while (idx < ib->length_dw);
3583
3584 return 0;
3585}