/* radeon_state.c -- State support for Radeon -*- linux-c -*- */
/*
 * Copyright 2000 VA Linux Systems, Inc., Fremont, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Gareth Hughes <gareth@valinux.com>
 *    Kevin E. Martin <martin@valinux.com>
 */

#include "drmP.h"
#include "drm.h"
#include "drm_sarea.h"
#include "radeon_drm.h"
#include "radeon_drv.h"

/* ================================================================
 * Helper functions for client state checking and fixup
 */

static __inline__ int radeon_check_and_fixup_offset(drm_radeon_private_t *
						    dev_priv,
						    drm_file_t * filp_priv,
						    u32 * offset)
{
	u64 off = *offset;
	u32 fb_end = dev_priv->fb_location + dev_priv->fb_size - 1;
	struct drm_radeon_driver_file_fields *radeon_priv;

	/* Hrm ... the story of the offset ... So this function converts
	 * the various ideas of what userland clients might have for an
	 * offset in the card address space into an offset into the card
	 * address space :) So with a sane client, it should just keep
	 * the value intact and just do some boundary checking. However,
	 * not all clients are sane. Some older clients pass us 0 based
	 * offsets relative to the start of the framebuffer and some may
	 * assume the AGP aperture is appended to the framebuffer, so we
	 * try to detect those cases and fix them up.
	 *
	 * Note: It might be a good idea here to make sure the offset lands
	 * in some "allowed" area to protect things like the PCIE GART...
	 */
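	/* A worked example (hypothetical values, not from any real card):
	 * with fb_location = 0xe0000000, fb_size = 0x08000000 and a
	 * SETPARAM-supplied radeon_fb_delta of 0xe0000000, a legacy client
	 * passing the zero-based offset 0x00100000 fails the first range
	 * check, gains the delta to become 0xe0100000 and then passes.
	 * An offset landing beyond fb_end is instead rebased into the
	 * GART aperture at gart_vm_start by the branch below.
	 */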

	/* First, the best case, the offset already lands in either the
	 * framebuffer or the GART mapped space
	 */
	if (radeon_check_offset(dev_priv, off))
		return 0;

	/* Ok, that didn't happen... now check if we have a zero based
	 * offset that fits in the framebuffer + gart space, apply the
	 * magic offset we get from SETPARAM or calculated from fb_location
	 */
	if (off < (dev_priv->fb_size + dev_priv->gart_size)) {
		radeon_priv = filp_priv->driver_priv;
		off += radeon_priv->radeon_fb_delta;
	}

	/* Finally, assume we aimed at a GART offset if beyond the fb */
	if (off > fb_end)
		off = off - fb_end - 1 + dev_priv->gart_vm_start;

	/* Now recheck and fail if out of bounds */
	if (radeon_check_offset(dev_priv, off)) {
		DRM_DEBUG("offset fixed up to 0x%x\n", (unsigned int)off);
		*offset = off;
		return 0;
	}
	return DRM_ERR(EINVAL);
}

static __inline__ int radeon_check_and_fixup_packets(drm_radeon_private_t *
						     dev_priv,
						     drm_file_t * filp_priv,
						     int id, u32 *data)
{
	switch (id) {

	case RADEON_EMIT_PP_MISC:
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
		    &data[(RADEON_RB3D_DEPTHOFFSET - RADEON_PP_MISC) / 4])) {
			DRM_ERROR("Invalid depth buffer offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case RADEON_EMIT_PP_CNTL:
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
		    &data[(RADEON_RB3D_COLOROFFSET - RADEON_PP_CNTL) / 4])) {
			DRM_ERROR("Invalid colour buffer offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case R200_EMIT_PP_TXOFFSET_0:
	case R200_EMIT_PP_TXOFFSET_1:
	case R200_EMIT_PP_TXOFFSET_2:
	case R200_EMIT_PP_TXOFFSET_3:
	case R200_EMIT_PP_TXOFFSET_4:
	case R200_EMIT_PP_TXOFFSET_5:
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &data[0])) {
			DRM_ERROR("Invalid R200 texture offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case RADEON_EMIT_PP_TXFILTER_0:
	case RADEON_EMIT_PP_TXFILTER_1:
	case RADEON_EMIT_PP_TXFILTER_2:
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
		    &data[(RADEON_PP_TXOFFSET_0 - RADEON_PP_TXFILTER_0) / 4])) {
			DRM_ERROR("Invalid R100 texture offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case R200_EMIT_PP_CUBIC_OFFSETS_0:
	case R200_EMIT_PP_CUBIC_OFFSETS_1:
	case R200_EMIT_PP_CUBIC_OFFSETS_2:
	case R200_EMIT_PP_CUBIC_OFFSETS_3:
	case R200_EMIT_PP_CUBIC_OFFSETS_4:
	case R200_EMIT_PP_CUBIC_OFFSETS_5:{
			int i;
			for (i = 0; i < 5; i++) {
				if (radeon_check_and_fixup_offset(dev_priv,
								  filp_priv,
								  &data[i])) {
					DRM_ERROR
					    ("Invalid R200 cubic texture offset\n");
					return DRM_ERR(EINVAL);
				}
			}
			break;
		}

	case RADEON_EMIT_PP_CUBIC_OFFSETS_T0:
	case RADEON_EMIT_PP_CUBIC_OFFSETS_T1:
	case RADEON_EMIT_PP_CUBIC_OFFSETS_T2:{
			int i;
			for (i = 0; i < 5; i++) {
				if (radeon_check_and_fixup_offset(dev_priv,
								  filp_priv,
								  &data[i])) {
					DRM_ERROR
					    ("Invalid R100 cubic texture offset\n");
					return DRM_ERR(EINVAL);
				}
			}
		}
		break;

	case R200_EMIT_VAP_CTL:{
			RING_LOCALS;
			BEGIN_RING(2);
			OUT_RING_REG(RADEON_SE_TCL_STATE_FLUSH, 0);
			ADVANCE_RING();
		}
		break;

	case RADEON_EMIT_RB3D_COLORPITCH:
	case RADEON_EMIT_RE_LINE_PATTERN:
	case RADEON_EMIT_SE_LINE_WIDTH:
	case RADEON_EMIT_PP_LUM_MATRIX:
	case RADEON_EMIT_PP_ROT_MATRIX_0:
	case RADEON_EMIT_RB3D_STENCILREFMASK:
	case RADEON_EMIT_SE_VPORT_XSCALE:
	case RADEON_EMIT_SE_CNTL:
	case RADEON_EMIT_SE_CNTL_STATUS:
	case RADEON_EMIT_RE_MISC:
	case RADEON_EMIT_PP_BORDER_COLOR_0:
	case RADEON_EMIT_PP_BORDER_COLOR_1:
	case RADEON_EMIT_PP_BORDER_COLOR_2:
	case RADEON_EMIT_SE_ZBIAS_FACTOR:
	case RADEON_EMIT_SE_TCL_OUTPUT_VTX_FMT:
	case RADEON_EMIT_SE_TCL_MATERIAL_EMMISSIVE_RED:
	case R200_EMIT_PP_TXCBLEND_0:
	case R200_EMIT_PP_TXCBLEND_1:
	case R200_EMIT_PP_TXCBLEND_2:
	case R200_EMIT_PP_TXCBLEND_3:
	case R200_EMIT_PP_TXCBLEND_4:
	case R200_EMIT_PP_TXCBLEND_5:
	case R200_EMIT_PP_TXCBLEND_6:
	case R200_EMIT_PP_TXCBLEND_7:
	case R200_EMIT_TCL_LIGHT_MODEL_CTL_0:
	case R200_EMIT_TFACTOR_0:
	case R200_EMIT_VTX_FMT_0:
	case R200_EMIT_MATRIX_SELECT_0:
	case R200_EMIT_TEX_PROC_CTL_2:
	case R200_EMIT_TCL_UCP_VERT_BLEND_CTL:
	case R200_EMIT_PP_TXFILTER_0:
	case R200_EMIT_PP_TXFILTER_1:
	case R200_EMIT_PP_TXFILTER_2:
	case R200_EMIT_PP_TXFILTER_3:
	case R200_EMIT_PP_TXFILTER_4:
	case R200_EMIT_PP_TXFILTER_5:
	case R200_EMIT_VTE_CNTL:
	case R200_EMIT_OUTPUT_VTX_COMP_SEL:
	case R200_EMIT_PP_TAM_DEBUG3:
	case R200_EMIT_PP_CNTL_X:
	case R200_EMIT_RB3D_DEPTHXY_OFFSET:
	case R200_EMIT_RE_AUX_SCISSOR_CNTL:
	case R200_EMIT_RE_SCISSOR_TL_0:
	case R200_EMIT_RE_SCISSOR_TL_1:
	case R200_EMIT_RE_SCISSOR_TL_2:
	case R200_EMIT_SE_VAP_CNTL_STATUS:
	case R200_EMIT_SE_VTX_STATE_CNTL:
	case R200_EMIT_RE_POINTSIZE:
	case R200_EMIT_TCL_INPUT_VTX_VECTOR_ADDR_0:
	case R200_EMIT_PP_CUBIC_FACES_0:
	case R200_EMIT_PP_CUBIC_FACES_1:
	case R200_EMIT_PP_CUBIC_FACES_2:
	case R200_EMIT_PP_CUBIC_FACES_3:
	case R200_EMIT_PP_CUBIC_FACES_4:
	case R200_EMIT_PP_CUBIC_FACES_5:
	case RADEON_EMIT_PP_TEX_SIZE_0:
	case RADEON_EMIT_PP_TEX_SIZE_1:
	case RADEON_EMIT_PP_TEX_SIZE_2:
	case R200_EMIT_RB3D_BLENDCOLOR:
	case R200_EMIT_TCL_POINT_SPRITE_CNTL:
	case RADEON_EMIT_PP_CUBIC_FACES_0:
	case RADEON_EMIT_PP_CUBIC_FACES_1:
	case RADEON_EMIT_PP_CUBIC_FACES_2:
	case R200_EMIT_PP_TRI_PERF_CNTL:
	case R200_EMIT_PP_AFS_0:
	case R200_EMIT_PP_AFS_1:
	case R200_EMIT_ATF_TFACTOR:
	case R200_EMIT_PP_TXCTLALL_0:
	case R200_EMIT_PP_TXCTLALL_1:
	case R200_EMIT_PP_TXCTLALL_2:
	case R200_EMIT_PP_TXCTLALL_3:
	case R200_EMIT_PP_TXCTLALL_4:
	case R200_EMIT_PP_TXCTLALL_5:
	case R200_EMIT_VAP_PVS_CNTL:
		/* These packets don't contain memory offsets */
		break;

	default:
		DRM_ERROR("Unknown state packet ID %d\n", id);
		return DRM_ERR(EINVAL);
	}

	return 0;
}

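/* Layout of a type-3 CP packet header as implied by the masks used below
 * (a sketch inferred from this file, not taken from documentation):
 * bits 31:30 hold the packet type (3), bits 29:16 the dword count minus
 * two (RADEON_CP_PACKET_COUNT_MASK >> 16), and bits 15:8 the opcode that
 * the switch on (cmd[0] & 0xff00) dispatches on.
 */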
static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
						     dev_priv,
						     drm_file_t *filp_priv,
						     drm_radeon_kcmd_buffer_t *
						     cmdbuf,
						     unsigned int *cmdsz)
{
	u32 *cmd = (u32 *) cmdbuf->buf;
	u32 offset, narrays;
	int count, i, k;

	*cmdsz = 2 + ((cmd[0] & RADEON_CP_PACKET_COUNT_MASK) >> 16);

	if ((cmd[0] & 0xc0000000) != RADEON_CP_PACKET3) {
		DRM_ERROR("Not a type 3 packet\n");
		return DRM_ERR(EINVAL);
	}

	if (4 * *cmdsz > cmdbuf->bufsz) {
		DRM_ERROR("Packet size larger than size of data provided\n");
		return DRM_ERR(EINVAL);
	}

	switch (cmd[0] & 0xff00) {
	/* XXX Are there old drivers needing other packets? */

	case RADEON_3D_DRAW_IMMD:
	case RADEON_3D_DRAW_VBUF:
	case RADEON_3D_DRAW_INDX:
	case RADEON_WAIT_FOR_IDLE:
	case RADEON_CP_NOP:
	case RADEON_3D_CLEAR_ZMASK:
/*	case RADEON_CP_NEXT_CHAR:
	case RADEON_CP_PLY_NEXTSCAN:
	case RADEON_CP_SET_SCISSORS: */	/* probably safe but will never need them? */
		/* these packets are safe */
		break;

	case RADEON_CP_3D_DRAW_IMMD_2:
	case RADEON_CP_3D_DRAW_VBUF_2:
	case RADEON_CP_3D_DRAW_INDX_2:
	case RADEON_3D_CLEAR_HIZ:
		/* safe but r200 only */
		if (dev_priv->microcode_version != UCODE_R200) {
			DRM_ERROR("Invalid 3d packet for r100-class chip\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case RADEON_3D_LOAD_VBPNTR:
		count = (cmd[0] >> 16) & 0x3fff;

		if (count > 18) {	/* 12 arrays max */
			DRM_ERROR("Too large payload in 3D_LOAD_VBPNTR (count=%d)\n",
				  count);
			return DRM_ERR(EINVAL);
		}

		/* carefully check packet contents */
		narrays = cmd[1] & ~0xc000;
		k = 0;
		i = 2;
		while ((k < narrays) && (i < (count + 2))) {
			i++;	/* skip attribute field */
			if (radeon_check_and_fixup_offset(dev_priv, filp_priv, &cmd[i])) {
				DRM_ERROR
				    ("Invalid offset (k=%d i=%d) in 3D_LOAD_VBPNTR packet.\n",
				     k, i);
				return DRM_ERR(EINVAL);
			}
			k++;
			i++;
			if (k == narrays)
				break;
			/* have one more to process, they come in pairs */
			if (radeon_check_and_fixup_offset(dev_priv, filp_priv, &cmd[i])) {
				DRM_ERROR
				    ("Invalid offset (k=%d i=%d) in 3D_LOAD_VBPNTR packet.\n",
				     k, i);
				return DRM_ERR(EINVAL);
			}
			k++;
			i++;
		}
		/* do the counts match what we expect ? */
		if ((k != narrays) || (i != (count + 2))) {
			DRM_ERROR
			    ("Malformed 3D_LOAD_VBPNTR packet (k=%d i=%d narrays=%d count+1=%d).\n",
			     k, i, narrays, count + 1);
			return DRM_ERR(EINVAL);
		}
		break;

	case RADEON_3D_RNDR_GEN_INDX_PRIM:
		if (dev_priv->microcode_version != UCODE_R100) {
			DRM_ERROR("Invalid 3d packet for r200-class chip\n");
			return DRM_ERR(EINVAL);
		}
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv, &cmd[1])) {
			DRM_ERROR("Invalid rndr_gen_indx offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case RADEON_CP_INDX_BUFFER:
		if (dev_priv->microcode_version != UCODE_R200) {
			DRM_ERROR("Invalid 3d packet for r100-class chip\n");
			return DRM_ERR(EINVAL);
		}
		if ((cmd[1] & 0x8000ffff) != 0x80000810) {
			DRM_ERROR("Invalid indx_buffer reg address %08X\n", cmd[1]);
			return DRM_ERR(EINVAL);
		}
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv, &cmd[2])) {
			DRM_ERROR("Invalid indx_buffer offset is %08X\n", cmd[2]);
			return DRM_ERR(EINVAL);
		}
		break;

	case RADEON_CNTL_HOSTDATA_BLT:
	case RADEON_CNTL_PAINT_MULTI:
	case RADEON_CNTL_BITBLT_MULTI:
		/* MSB of opcode: next DWORD GUI_CNTL */
		if (cmd[1] & (RADEON_GMC_SRC_PITCH_OFFSET_CNTL
			      | RADEON_GMC_DST_PITCH_OFFSET_CNTL)) {
			offset = cmd[2] << 10;
			if (radeon_check_and_fixup_offset
			    (dev_priv, filp_priv, &offset)) {
				DRM_ERROR("Invalid first packet offset\n");
				return DRM_ERR(EINVAL);
			}
			cmd[2] = (cmd[2] & 0xffc00000) | offset >> 10;
		}

		if ((cmd[1] & RADEON_GMC_SRC_PITCH_OFFSET_CNTL) &&
		    (cmd[1] & RADEON_GMC_DST_PITCH_OFFSET_CNTL)) {
			offset = cmd[3] << 10;
			if (radeon_check_and_fixup_offset
			    (dev_priv, filp_priv, &offset)) {
				DRM_ERROR("Invalid second packet offset\n");
				return DRM_ERR(EINVAL);
			}
			cmd[3] = (cmd[3] & 0xffc00000) | offset >> 10;
		}
		break;

	default:
		DRM_ERROR("Invalid packet type %x\n", cmd[0] & 0xff00);
		return DRM_ERR(EINVAL);
	}

	return 0;
}

/* ================================================================
 * CP hardware state programming functions
 */

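/* The clip rect registers appear to take an inclusive bottom-right
 * corner: RE_WIDTH_HEIGHT is programmed with (x2 - 1, y2 - 1) below even
 * though drm_clip_rect_t carries an exclusive bottom-right.
 */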
static __inline__ void radeon_emit_clip_rect(drm_radeon_private_t * dev_priv,
					     drm_clip_rect_t * box)
{
	RING_LOCALS;

	DRM_DEBUG(" box: x1=%d y1=%d x2=%d y2=%d\n",
		  box->x1, box->y1, box->x2, box->y2);

	BEGIN_RING(4);
	OUT_RING(CP_PACKET0(RADEON_RE_TOP_LEFT, 0));
	OUT_RING((box->y1 << 16) | box->x1);
	OUT_RING(CP_PACKET0(RADEON_RE_WIDTH_HEIGHT, 0));
	OUT_RING(((box->y2 - 1) << 16) | (box->x2 - 1));
	ADVANCE_RING();
}

/* Emit 1.1 state
 */
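/* Note on the ring macros used throughout these emit paths: BEGIN_RING(n)
 * reserves n ring dwords and each OUT_RING() fills exactly one (an
 * OUT_RING_REG() pair fills two), so e.g. the BEGIN_RING(14) below is
 * matched by fourteen OUT_RING() calls -- three CP_PACKET0() headers plus
 * eleven register values -- before ADVANCE_RING() commits the block.
 */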
static int radeon_emit_state(drm_radeon_private_t * dev_priv,
			     drm_file_t * filp_priv,
			     drm_radeon_context_regs_t * ctx,
			     drm_radeon_texture_regs_t * tex,
			     unsigned int dirty)
{
	RING_LOCALS;
	DRM_DEBUG("dirty=0x%08x\n", dirty);

	if (dirty & RADEON_UPLOAD_CONTEXT) {
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &ctx->rb3d_depthoffset)) {
			DRM_ERROR("Invalid depth buffer offset\n");
			return DRM_ERR(EINVAL);
		}

		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &ctx->rb3d_coloroffset)) {
			DRM_ERROR("Invalid colour buffer offset\n");
			return DRM_ERR(EINVAL);
		}

		BEGIN_RING(14);
		OUT_RING(CP_PACKET0(RADEON_PP_MISC, 6));
		OUT_RING(ctx->pp_misc);
		OUT_RING(ctx->pp_fog_color);
		OUT_RING(ctx->re_solid_color);
		OUT_RING(ctx->rb3d_blendcntl);
		OUT_RING(ctx->rb3d_depthoffset);
		OUT_RING(ctx->rb3d_depthpitch);
		OUT_RING(ctx->rb3d_zstencilcntl);
		OUT_RING(CP_PACKET0(RADEON_PP_CNTL, 2));
		OUT_RING(ctx->pp_cntl);
		OUT_RING(ctx->rb3d_cntl);
		OUT_RING(ctx->rb3d_coloroffset);
		OUT_RING(CP_PACKET0(RADEON_RB3D_COLORPITCH, 0));
		OUT_RING(ctx->rb3d_colorpitch);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_VERTFMT) {
		BEGIN_RING(2);
		OUT_RING(CP_PACKET0(RADEON_SE_COORD_FMT, 0));
		OUT_RING(ctx->se_coord_fmt);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_LINE) {
		BEGIN_RING(5);
		OUT_RING(CP_PACKET0(RADEON_RE_LINE_PATTERN, 1));
		OUT_RING(ctx->re_line_pattern);
		OUT_RING(ctx->re_line_state);
		OUT_RING(CP_PACKET0(RADEON_SE_LINE_WIDTH, 0));
		OUT_RING(ctx->se_line_width);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_BUMPMAP) {
		BEGIN_RING(5);
		OUT_RING(CP_PACKET0(RADEON_PP_LUM_MATRIX, 0));
		OUT_RING(ctx->pp_lum_matrix);
		OUT_RING(CP_PACKET0(RADEON_PP_ROT_MATRIX_0, 1));
		OUT_RING(ctx->pp_rot_matrix_0);
		OUT_RING(ctx->pp_rot_matrix_1);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_MASKS) {
		BEGIN_RING(4);
		OUT_RING(CP_PACKET0(RADEON_RB3D_STENCILREFMASK, 2));
		OUT_RING(ctx->rb3d_stencilrefmask);
		OUT_RING(ctx->rb3d_ropcntl);
		OUT_RING(ctx->rb3d_planemask);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_VIEWPORT) {
		BEGIN_RING(7);
		OUT_RING(CP_PACKET0(RADEON_SE_VPORT_XSCALE, 5));
		OUT_RING(ctx->se_vport_xscale);
		OUT_RING(ctx->se_vport_xoffset);
		OUT_RING(ctx->se_vport_yscale);
		OUT_RING(ctx->se_vport_yoffset);
		OUT_RING(ctx->se_vport_zscale);
		OUT_RING(ctx->se_vport_zoffset);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_SETUP) {
		BEGIN_RING(4);
		OUT_RING(CP_PACKET0(RADEON_SE_CNTL, 0));
		OUT_RING(ctx->se_cntl);
		OUT_RING(CP_PACKET0(RADEON_SE_CNTL_STATUS, 0));
		OUT_RING(ctx->se_cntl_status);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_MISC) {
		BEGIN_RING(2);
		OUT_RING(CP_PACKET0(RADEON_RE_MISC, 0));
		OUT_RING(ctx->re_misc);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_TEX0) {
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &tex[0].pp_txoffset)) {
			DRM_ERROR("Invalid texture offset for unit 0\n");
			return DRM_ERR(EINVAL);
		}

		BEGIN_RING(9);
		OUT_RING(CP_PACKET0(RADEON_PP_TXFILTER_0, 5));
		OUT_RING(tex[0].pp_txfilter);
		OUT_RING(tex[0].pp_txformat);
		OUT_RING(tex[0].pp_txoffset);
		OUT_RING(tex[0].pp_txcblend);
		OUT_RING(tex[0].pp_txablend);
		OUT_RING(tex[0].pp_tfactor);
		OUT_RING(CP_PACKET0(RADEON_PP_BORDER_COLOR_0, 0));
		OUT_RING(tex[0].pp_border_color);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_TEX1) {
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &tex[1].pp_txoffset)) {
			DRM_ERROR("Invalid texture offset for unit 1\n");
			return DRM_ERR(EINVAL);
		}

		BEGIN_RING(9);
		OUT_RING(CP_PACKET0(RADEON_PP_TXFILTER_1, 5));
		OUT_RING(tex[1].pp_txfilter);
		OUT_RING(tex[1].pp_txformat);
		OUT_RING(tex[1].pp_txoffset);
		OUT_RING(tex[1].pp_txcblend);
		OUT_RING(tex[1].pp_txablend);
		OUT_RING(tex[1].pp_tfactor);
		OUT_RING(CP_PACKET0(RADEON_PP_BORDER_COLOR_1, 0));
		OUT_RING(tex[1].pp_border_color);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_TEX2) {
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &tex[2].pp_txoffset)) {
			DRM_ERROR("Invalid texture offset for unit 2\n");
			return DRM_ERR(EINVAL);
		}

		BEGIN_RING(9);
		OUT_RING(CP_PACKET0(RADEON_PP_TXFILTER_2, 5));
		OUT_RING(tex[2].pp_txfilter);
		OUT_RING(tex[2].pp_txformat);
		OUT_RING(tex[2].pp_txoffset);
		OUT_RING(tex[2].pp_txcblend);
		OUT_RING(tex[2].pp_txablend);
		OUT_RING(tex[2].pp_tfactor);
		OUT_RING(CP_PACKET0(RADEON_PP_BORDER_COLOR_2, 0));
		OUT_RING(tex[2].pp_border_color);
		ADVANCE_RING();
	}

	return 0;
}

/* Emit 1.2 state
 */
static int radeon_emit_state2(drm_radeon_private_t * dev_priv,
			      drm_file_t * filp_priv,
			      drm_radeon_state_t * state)
{
	RING_LOCALS;

	if (state->dirty & RADEON_UPLOAD_ZBIAS) {
		BEGIN_RING(3);
		OUT_RING(CP_PACKET0(RADEON_SE_ZBIAS_FACTOR, 1));
		OUT_RING(state->context2.se_zbias_factor);
		OUT_RING(state->context2.se_zbias_constant);
		ADVANCE_RING();
	}

	return radeon_emit_state(dev_priv, filp_priv, &state->context,
				 state->tex, state->dirty);
}

/* New (1.3) state mechanism.  3 commands (packet, scalar, vector) in
 * 1.3 cmdbuffers allow all previous state to be updated as well as
 * the tcl scalar and vector areas.
 */
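/* Each entry below is indexed by the RADEON_EMIT_* / R200_EMIT_* ids that
 * radeon_check_and_fixup_packets() switches on: 'start' is the first
 * register of the block and 'len' the number of register dwords the
 * packet carries (presumably what the 1.3 cmdbuffer replay path uses to
 * bound-check and emit client-supplied data).
 */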
static struct {
	int start;
	int len;
	const char *name;
} packet[RADEON_MAX_STATE_PACKETS] = {
	{RADEON_PP_MISC, 7, "RADEON_PP_MISC"},
	{RADEON_PP_CNTL, 3, "RADEON_PP_CNTL"},
	{RADEON_RB3D_COLORPITCH, 1, "RADEON_RB3D_COLORPITCH"},
	{RADEON_RE_LINE_PATTERN, 2, "RADEON_RE_LINE_PATTERN"},
	{RADEON_SE_LINE_WIDTH, 1, "RADEON_SE_LINE_WIDTH"},
	{RADEON_PP_LUM_MATRIX, 1, "RADEON_PP_LUM_MATRIX"},
	{RADEON_PP_ROT_MATRIX_0, 2, "RADEON_PP_ROT_MATRIX_0"},
	{RADEON_RB3D_STENCILREFMASK, 3, "RADEON_RB3D_STENCILREFMASK"},
	{RADEON_SE_VPORT_XSCALE, 6, "RADEON_SE_VPORT_XSCALE"},
	{RADEON_SE_CNTL, 2, "RADEON_SE_CNTL"},
	{RADEON_SE_CNTL_STATUS, 1, "RADEON_SE_CNTL_STATUS"},
	{RADEON_RE_MISC, 1, "RADEON_RE_MISC"},
	{RADEON_PP_TXFILTER_0, 6, "RADEON_PP_TXFILTER_0"},
	{RADEON_PP_BORDER_COLOR_0, 1, "RADEON_PP_BORDER_COLOR_0"},
	{RADEON_PP_TXFILTER_1, 6, "RADEON_PP_TXFILTER_1"},
	{RADEON_PP_BORDER_COLOR_1, 1, "RADEON_PP_BORDER_COLOR_1"},
	{RADEON_PP_TXFILTER_2, 6, "RADEON_PP_TXFILTER_2"},
	{RADEON_PP_BORDER_COLOR_2, 1, "RADEON_PP_BORDER_COLOR_2"},
	{RADEON_SE_ZBIAS_FACTOR, 2, "RADEON_SE_ZBIAS_FACTOR"},
	{RADEON_SE_TCL_OUTPUT_VTX_FMT, 11, "RADEON_SE_TCL_OUTPUT_VTX_FMT"},
	{RADEON_SE_TCL_MATERIAL_EMMISSIVE_RED, 17,
	 "RADEON_SE_TCL_MATERIAL_EMMISSIVE_RED"},
	{R200_PP_TXCBLEND_0, 4, "R200_PP_TXCBLEND_0"},
	{R200_PP_TXCBLEND_1, 4, "R200_PP_TXCBLEND_1"},
	{R200_PP_TXCBLEND_2, 4, "R200_PP_TXCBLEND_2"},
	{R200_PP_TXCBLEND_3, 4, "R200_PP_TXCBLEND_3"},
	{R200_PP_TXCBLEND_4, 4, "R200_PP_TXCBLEND_4"},
	{R200_PP_TXCBLEND_5, 4, "R200_PP_TXCBLEND_5"},
	{R200_PP_TXCBLEND_6, 4, "R200_PP_TXCBLEND_6"},
	{R200_PP_TXCBLEND_7, 4, "R200_PP_TXCBLEND_7"},
	{R200_SE_TCL_LIGHT_MODEL_CTL_0, 6, "R200_SE_TCL_LIGHT_MODEL_CTL_0"},
	{R200_PP_TFACTOR_0, 6, "R200_PP_TFACTOR_0"},
	{R200_SE_VTX_FMT_0, 4, "R200_SE_VTX_FMT_0"},
	{R200_SE_VAP_CNTL, 1, "R200_SE_VAP_CNTL"},
	{R200_SE_TCL_MATRIX_SEL_0, 5, "R200_SE_TCL_MATRIX_SEL_0"},
	{R200_SE_TCL_TEX_PROC_CTL_2, 5, "R200_SE_TCL_TEX_PROC_CTL_2"},
	{R200_SE_TCL_UCP_VERT_BLEND_CTL, 1, "R200_SE_TCL_UCP_VERT_BLEND_CTL"},
	{R200_PP_TXFILTER_0, 6, "R200_PP_TXFILTER_0"},
	{R200_PP_TXFILTER_1, 6, "R200_PP_TXFILTER_1"},
	{R200_PP_TXFILTER_2, 6, "R200_PP_TXFILTER_2"},
	{R200_PP_TXFILTER_3, 6, "R200_PP_TXFILTER_3"},
	{R200_PP_TXFILTER_4, 6, "R200_PP_TXFILTER_4"},
	{R200_PP_TXFILTER_5, 6, "R200_PP_TXFILTER_5"},
	{R200_PP_TXOFFSET_0, 1, "R200_PP_TXOFFSET_0"},
	{R200_PP_TXOFFSET_1, 1, "R200_PP_TXOFFSET_1"},
	{R200_PP_TXOFFSET_2, 1, "R200_PP_TXOFFSET_2"},
	{R200_PP_TXOFFSET_3, 1, "R200_PP_TXOFFSET_3"},
	{R200_PP_TXOFFSET_4, 1, "R200_PP_TXOFFSET_4"},
	{R200_PP_TXOFFSET_5, 1, "R200_PP_TXOFFSET_5"},
	{R200_SE_VTE_CNTL, 1, "R200_SE_VTE_CNTL"},
	{R200_SE_TCL_OUTPUT_VTX_COMP_SEL, 1,
	 "R200_SE_TCL_OUTPUT_VTX_COMP_SEL"},
	{R200_PP_TAM_DEBUG3, 1, "R200_PP_TAM_DEBUG3"},
	{R200_PP_CNTL_X, 1, "R200_PP_CNTL_X"},
	{R200_RB3D_DEPTHXY_OFFSET, 1, "R200_RB3D_DEPTHXY_OFFSET"},
	{R200_RE_AUX_SCISSOR_CNTL, 1, "R200_RE_AUX_SCISSOR_CNTL"},
	{R200_RE_SCISSOR_TL_0, 2, "R200_RE_SCISSOR_TL_0"},
	{R200_RE_SCISSOR_TL_1, 2, "R200_RE_SCISSOR_TL_1"},
	{R200_RE_SCISSOR_TL_2, 2, "R200_RE_SCISSOR_TL_2"},
	{R200_SE_VAP_CNTL_STATUS, 1, "R200_SE_VAP_CNTL_STATUS"},
	{R200_SE_VTX_STATE_CNTL, 1, "R200_SE_VTX_STATE_CNTL"},
	{R200_RE_POINTSIZE, 1, "R200_RE_POINTSIZE"},
	{R200_SE_TCL_INPUT_VTX_VECTOR_ADDR_0, 4,
	 "R200_SE_TCL_INPUT_VTX_VECTOR_ADDR_0"},
	{R200_PP_CUBIC_FACES_0, 1, "R200_PP_CUBIC_FACES_0"},	/* 61 */
	{R200_PP_CUBIC_OFFSET_F1_0, 5, "R200_PP_CUBIC_OFFSET_F1_0"},	/* 62 */
	{R200_PP_CUBIC_FACES_1, 1, "R200_PP_CUBIC_FACES_1"},
	{R200_PP_CUBIC_OFFSET_F1_1, 5, "R200_PP_CUBIC_OFFSET_F1_1"},
	{R200_PP_CUBIC_FACES_2, 1, "R200_PP_CUBIC_FACES_2"},
	{R200_PP_CUBIC_OFFSET_F1_2, 5, "R200_PP_CUBIC_OFFSET_F1_2"},
	{R200_PP_CUBIC_FACES_3, 1, "R200_PP_CUBIC_FACES_3"},
	{R200_PP_CUBIC_OFFSET_F1_3, 5, "R200_PP_CUBIC_OFFSET_F1_3"},
	{R200_PP_CUBIC_FACES_4, 1, "R200_PP_CUBIC_FACES_4"},
	{R200_PP_CUBIC_OFFSET_F1_4, 5, "R200_PP_CUBIC_OFFSET_F1_4"},
	{R200_PP_CUBIC_FACES_5, 1, "R200_PP_CUBIC_FACES_5"},
	{R200_PP_CUBIC_OFFSET_F1_5, 5, "R200_PP_CUBIC_OFFSET_F1_5"},
	{RADEON_PP_TEX_SIZE_0, 2, "RADEON_PP_TEX_SIZE_0"},
	{RADEON_PP_TEX_SIZE_1, 2, "RADEON_PP_TEX_SIZE_1"},
	{RADEON_PP_TEX_SIZE_2, 2, "RADEON_PP_TEX_SIZE_2"},
	{R200_RB3D_BLENDCOLOR, 3, "R200_RB3D_BLENDCOLOR"},
	{R200_SE_TCL_POINT_SPRITE_CNTL, 1, "R200_SE_TCL_POINT_SPRITE_CNTL"},
	{RADEON_PP_CUBIC_FACES_0, 1, "RADEON_PP_CUBIC_FACES_0"},
	{RADEON_PP_CUBIC_OFFSET_T0_0, 5, "RADEON_PP_CUBIC_OFFSET_T0_0"},
	{RADEON_PP_CUBIC_FACES_1, 1, "RADEON_PP_CUBIC_FACES_1"},
	{RADEON_PP_CUBIC_OFFSET_T1_0, 5, "RADEON_PP_CUBIC_OFFSET_T1_0"},
	{RADEON_PP_CUBIC_FACES_2, 1, "RADEON_PP_CUBIC_FACES_2"},
	{RADEON_PP_CUBIC_OFFSET_T2_0, 5, "RADEON_PP_CUBIC_OFFSET_T2_0"},
	{R200_PP_TRI_PERF, 2, "R200_PP_TRI_PERF"},
	{R200_PP_AFS_0, 32, "R200_PP_AFS_0"},	/* 85 */
	{R200_PP_AFS_1, 32, "R200_PP_AFS_1"},
	{R200_PP_TFACTOR_0, 8, "R200_ATF_TFACTOR"},
	{R200_PP_TXFILTER_0, 8, "R200_PP_TXCTLALL_0"},
	{R200_PP_TXFILTER_1, 8, "R200_PP_TXCTLALL_1"},
	{R200_PP_TXFILTER_2, 8, "R200_PP_TXCTLALL_2"},
	{R200_PP_TXFILTER_3, 8, "R200_PP_TXCTLALL_3"},
	{R200_PP_TXFILTER_4, 8, "R200_PP_TXCTLALL_4"},
	{R200_PP_TXFILTER_5, 8, "R200_PP_TXCTLALL_5"},
	{R200_VAP_PVS_CNTL_1, 2, "R200_VAP_PVS_CNTL"},
};

/* ================================================================
 * Performance monitoring functions
 */

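/* Paints a small solid box into the current front or back buffer; x/y
 * are taken relative to the first clip rect, so the performance boxes
 * drawn below land near the top-left corner of the window being
 * rendered.
 */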
static void radeon_clear_box(drm_radeon_private_t * dev_priv,
			     int x, int y, int w, int h, int r, int g, int b)
{
	u32 color;
	RING_LOCALS;

	x += dev_priv->sarea_priv->boxes[0].x1;
	y += dev_priv->sarea_priv->boxes[0].y1;

	switch (dev_priv->color_fmt) {
	case RADEON_COLOR_FORMAT_RGB565:
		color = (((r & 0xf8) << 8) |
			 ((g & 0xfc) << 3) | ((b & 0xf8) >> 3));
		break;
	case RADEON_COLOR_FORMAT_ARGB8888:
	default:
		color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
		break;
	}

	BEGIN_RING(4);
	RADEON_WAIT_UNTIL_3D_IDLE();
	OUT_RING(CP_PACKET0(RADEON_DP_WRITE_MASK, 0));
	OUT_RING(0xffffffff);
	ADVANCE_RING();

	BEGIN_RING(6);

	OUT_RING(CP_PACKET3(RADEON_CNTL_PAINT_MULTI, 4));
	OUT_RING(RADEON_GMC_DST_PITCH_OFFSET_CNTL |
		 RADEON_GMC_BRUSH_SOLID_COLOR |
		 (dev_priv->color_fmt << 8) |
		 RADEON_GMC_SRC_DATATYPE_COLOR |
		 RADEON_ROP3_P | RADEON_GMC_CLR_CMP_CNTL_DIS);

	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		OUT_RING(dev_priv->front_pitch_offset);
	} else {
		OUT_RING(dev_priv->back_pitch_offset);
	}

	OUT_RING(color);

	OUT_RING((x << 16) | y);
	OUT_RING((w << 16) | h);

	ADVANCE_RING();
}

static void radeon_cp_performance_boxes(drm_radeon_private_t * dev_priv)
{
	/* Collapse various things into a wait flag -- trying to
	 * guess if userspace slept -- better just to have them tell us.
	 */
	if (dev_priv->stats.last_frame_reads > 1 ||
	    dev_priv->stats.last_clear_reads > dev_priv->stats.clears) {
		dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
	}

	if (dev_priv->stats.freelist_loops) {
		dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
	}

	/* Purple box for page flipping
	 */
	if (dev_priv->stats.boxes & RADEON_BOX_FLIP)
		radeon_clear_box(dev_priv, 4, 4, 8, 8, 255, 0, 255);

	/* Red box if we have to wait for idle at any point
	 */
	if (dev_priv->stats.boxes & RADEON_BOX_WAIT_IDLE)
		radeon_clear_box(dev_priv, 16, 4, 8, 8, 255, 0, 0);

	/* Blue box: lost context?
	 */

	/* Yellow box for texture swaps
	 */
	if (dev_priv->stats.boxes & RADEON_BOX_TEXTURE_LOAD)
		radeon_clear_box(dev_priv, 40, 4, 8, 8, 255, 255, 0);

	/* Green box if hardware never idles (as far as we can tell)
	 */
	if (!(dev_priv->stats.boxes & RADEON_BOX_DMA_IDLE))
		radeon_clear_box(dev_priv, 64, 4, 8, 8, 0, 255, 0);

	/* Draw bars indicating number of buffers allocated
	 * (not a great measure, easily confused)
	 */
	if (dev_priv->stats.requested_bufs) {
		if (dev_priv->stats.requested_bufs > 100)
			dev_priv->stats.requested_bufs = 100;

		radeon_clear_box(dev_priv, 4, 16,
				 dev_priv->stats.requested_bufs, 4,
				 196, 128, 128);
	}

	memset(&dev_priv->stats, 0, sizeof(dev_priv->stats));
}

/* ================================================================
 * CP command dispatch functions
 */

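/* Clear dispatch: colour buffers are cleared with 2D PAINT_MULTI blits,
 * while depth/stencil goes either through the (reverse-engineered)
 * hyper-z fast clear path below or through rendering a quad over each
 * clip rect with the 3D engine set up to touch only depth/stencil.
 */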
static void radeon_cp_dispatch_clear(drm_device_t * dev,
				     drm_radeon_clear_t * clear,
				     drm_radeon_clear_rect_t * depth_boxes)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_depth_clear_t *depth_clear = &dev_priv->depth_clear;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	u32 rb3d_cntl = 0, rb3d_stencilrefmask = 0;
	int i;
	RING_LOCALS;
	DRM_DEBUG("flags = 0x%x\n", flags);

	dev_priv->stats.clears++;

	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		unsigned int tmp = flags;

		flags &= ~(RADEON_FRONT | RADEON_BACK);
		if (tmp & RADEON_FRONT)
			flags |= RADEON_BACK;
		if (tmp & RADEON_BACK)
			flags |= RADEON_FRONT;
	}

	if (flags & (RADEON_FRONT | RADEON_BACK)) {

		BEGIN_RING(4);

		/* Ensure the 3D stream is idle before doing a
		 * 2D fill to clear the front or back buffer.
		 */
		RADEON_WAIT_UNTIL_3D_IDLE();

		OUT_RING(CP_PACKET0(RADEON_DP_WRITE_MASK, 0));
		OUT_RING(clear->color_mask);

		ADVANCE_RING();

		/* Make sure we restore the 3D state next time.
		 */
		dev_priv->sarea_priv->ctx_owner = 0;

		for (i = 0; i < nbox; i++) {
			int x = pbox[i].x1;
			int y = pbox[i].y1;
			int w = pbox[i].x2 - x;
			int h = pbox[i].y2 - y;

			DRM_DEBUG("dispatch clear %d,%d-%d,%d flags 0x%x\n",
				  x, y, w, h, flags);

			if (flags & RADEON_FRONT) {
				BEGIN_RING(6);

				OUT_RING(CP_PACKET3
					 (RADEON_CNTL_PAINT_MULTI, 4));
				OUT_RING(RADEON_GMC_DST_PITCH_OFFSET_CNTL |
					 RADEON_GMC_BRUSH_SOLID_COLOR |
					 (dev_priv->
					  color_fmt << 8) |
					 RADEON_GMC_SRC_DATATYPE_COLOR |
					 RADEON_ROP3_P |
					 RADEON_GMC_CLR_CMP_CNTL_DIS);

				OUT_RING(dev_priv->front_pitch_offset);
				OUT_RING(clear->clear_color);

				OUT_RING((x << 16) | y);
				OUT_RING((w << 16) | h);

				ADVANCE_RING();
			}

			if (flags & RADEON_BACK) {
				BEGIN_RING(6);

				OUT_RING(CP_PACKET3
					 (RADEON_CNTL_PAINT_MULTI, 4));
				OUT_RING(RADEON_GMC_DST_PITCH_OFFSET_CNTL |
					 RADEON_GMC_BRUSH_SOLID_COLOR |
					 (dev_priv->
					  color_fmt << 8) |
					 RADEON_GMC_SRC_DATATYPE_COLOR |
					 RADEON_ROP3_P |
					 RADEON_GMC_CLR_CMP_CNTL_DIS);

				OUT_RING(dev_priv->back_pitch_offset);
				OUT_RING(clear->clear_color);

				OUT_RING((x << 16) | y);
				OUT_RING((w << 16) | h);

				ADVANCE_RING();
			}
		}
	}

	/* hyper z clear */
	/* no docs available, based on reverse engineering by Stephane Marchesin */
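	/* Worked example for the tile arithmetic below (hypothetical
	 * numbers, not from documentation): a 16-bit z-buffer with
	 * depth_pitch = 2048 gives depthpixperline = 1024.  For a clip
	 * rect (0,0)-(64,16), the r100-with-hierz branch computes
	 * tileoffset = 0, nrtilesx = 4 and nrtilesy = 2, so it emits
	 * three RADEON_3D_CLEAR_ZMASK packets, stepping tileoffset by
	 * depthpixperline >> 6 = 16 per tile row.
	 */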
	if ((flags & (RADEON_DEPTH | RADEON_STENCIL))
	    && (flags & RADEON_CLEAR_FASTZ)) {

		int i;
		int depthpixperline =
		    dev_priv->depth_fmt ==
		    RADEON_DEPTH_FORMAT_16BIT_INT_Z ? (dev_priv->depth_pitch /
						       2) : (dev_priv->
							     depth_pitch / 4);

		u32 clearmask;

		u32 tempRB3D_DEPTHCLEARVALUE = clear->clear_depth |
		    ((clear->depth_mask & 0xff) << 24);

		/* Make sure we restore the 3D state next time.
		 * we haven't touched any "normal" state - still need this?
		 */
		dev_priv->sarea_priv->ctx_owner = 0;

		if ((dev_priv->flags & RADEON_HAS_HIERZ)
		    && (flags & RADEON_USE_HIERZ)) {
			/* FIXME : reverse engineer that for Rx00 cards */
			/* FIXME : the mask supposedly contains low-res z values. So can't set
			   just to the max (0xff? or actually 0x3fff?), need to take z clear
			   value into account? */
			/* pattern seems to work for r100, though get slight
			   rendering errors with glxgears. If hierz is not enabled for r100,
			   only 4 bits which indicate clear (15,16,31,32, all zero) matter, the
			   other ones are ignored, and the same clear mask can be used. That's
			   very different behaviour than R200 which needs different clear mask
			   and different number of tiles to clear if hierz is enabled or not !?!
			 */
			clearmask = (0xff << 22) | (0xff << 6) | 0x003f003f;
		} else {
			/* clear mask : chooses the clearing pattern.
			   rv250: could be used to clear only parts of macrotiles
			   (but that would get really complicated...)?
			   bit 0 and 1 (either or both of them ?!?!) are used to
			   not clear tile (or maybe one of the bits indicates if the tile is
			   compressed or not), bit 2 and 3 to not clear tile 1,...,.
			   Pattern is as follows:
			   | 0,1 | 4,5 | 8,9 |12,13|16,17|20,21|24,25|28,29|
			   bits -------------------------------------------------
			   | 2,3 | 6,7 |10,11|14,15|18,19|22,23|26,27|30,31|
			   rv100: clearmask covers 2x8 4x1 tiles, but one clear still
			   covers 256 pixels ?!?
			 */
			clearmask = 0x0;
		}

		BEGIN_RING(8);
		RADEON_WAIT_UNTIL_2D_IDLE();
		OUT_RING_REG(RADEON_RB3D_DEPTHCLEARVALUE,
			     tempRB3D_DEPTHCLEARVALUE);
		/* what offset is this exactly ? */
		OUT_RING_REG(RADEON_RB3D_ZMASKOFFSET, 0);
		/* need ctlstat, otherwise get some strange black flickering */
		OUT_RING_REG(RADEON_RB3D_ZCACHE_CTLSTAT,
			     RADEON_RB3D_ZC_FLUSH_ALL);
		ADVANCE_RING();

		for (i = 0; i < nbox; i++) {
			int tileoffset, nrtilesx, nrtilesy, j;
			/* it looks like r200 needs rv-style clears, at least if hierz is not enabled? */
			if ((dev_priv->flags & RADEON_HAS_HIERZ)
			    && !(dev_priv->microcode_version == UCODE_R200)) {
				/* FIXME : figure this out for r200 (when hierz is enabled). Or
				   maybe r200 actually doesn't need to put the low-res z value into
				   the tile cache like r100, but just needs to clear the hi-level z-buffer?
				   Works for R100, both with hierz and without.
				   R100 seems to operate on 2x1 8x8 tiles, but...
				   odd: offset/nrtiles need to be 64 pix (4 block) aligned? Potentially
				   problematic with resolutions which are not 64 pix aligned? */
				tileoffset =
				    ((pbox[i].y1 >> 3) * depthpixperline +
				     pbox[i].x1) >> 6;
				nrtilesx =
				    ((pbox[i].x2 & ~63) -
				     (pbox[i].x1 & ~63)) >> 4;
				nrtilesy =
				    (pbox[i].y2 >> 3) - (pbox[i].y1 >> 3);
				for (j = 0; j <= nrtilesy; j++) {
					BEGIN_RING(4);
					OUT_RING(CP_PACKET3
						 (RADEON_3D_CLEAR_ZMASK, 2));
					/* first tile */
					OUT_RING(tileoffset * 8);
					/* the number of tiles to clear */
					OUT_RING(nrtilesx + 4);
					/* clear mask : chooses the clearing pattern. */
					OUT_RING(clearmask);
					ADVANCE_RING();
					tileoffset += depthpixperline >> 6;
				}
			} else if (dev_priv->microcode_version == UCODE_R200) {
				/* works for rv250. */
				/* find first macro tile (8x2 4x4 z-pixels on rv250) */
				tileoffset =
				    ((pbox[i].y1 >> 3) * depthpixperline +
				     pbox[i].x1) >> 5;
				nrtilesx =
				    (pbox[i].x2 >> 5) - (pbox[i].x1 >> 5);
				nrtilesy =
				    (pbox[i].y2 >> 3) - (pbox[i].y1 >> 3);
				for (j = 0; j <= nrtilesy; j++) {
					BEGIN_RING(4);
					OUT_RING(CP_PACKET3
						 (RADEON_3D_CLEAR_ZMASK, 2));
					/* first tile */
					/* judging by the first tile offset needed, could possibly
					   directly address/clear 4x4 tiles instead of 8x2 * 4x4
					   macro tiles, though would still need clear mask for
					   right/bottom if truly 4x4 granularity is desired ? */
					OUT_RING(tileoffset * 16);
					/* the number of tiles to clear */
					OUT_RING(nrtilesx + 1);
					/* clear mask : chooses the clearing pattern. */
					OUT_RING(clearmask);
					ADVANCE_RING();
					tileoffset += depthpixperline >> 5;
				}
			} else {	/* rv 100 */
				/* rv100 might not need 64 pix alignment, who knows */
				/* offsets are, hmm, weird */
				tileoffset =
				    ((pbox[i].y1 >> 4) * depthpixperline +
				     pbox[i].x1) >> 6;
				nrtilesx =
				    ((pbox[i].x2 & ~63) -
				     (pbox[i].x1 & ~63)) >> 4;
				nrtilesy =
				    (pbox[i].y2 >> 4) - (pbox[i].y1 >> 4);
				for (j = 0; j <= nrtilesy; j++) {
					BEGIN_RING(4);
					OUT_RING(CP_PACKET3
						 (RADEON_3D_CLEAR_ZMASK, 2));
					OUT_RING(tileoffset * 128);
					/* the number of tiles to clear */
					OUT_RING(nrtilesx + 4);
					/* clear mask : chooses the clearing pattern. */
					OUT_RING(clearmask);
					ADVANCE_RING();
					tileoffset += depthpixperline >> 6;
				}
			}
		}

		/* TODO don't always clear all hi-level z tiles */
		if ((dev_priv->flags & RADEON_HAS_HIERZ)
		    && (dev_priv->microcode_version == UCODE_R200)
		    && (flags & RADEON_USE_HIERZ))
			/* r100 and cards without hierarchical z-buffer have no high-level z-buffer */
			/* FIXME : the mask supposedly contains low-res z values. So can't set
			   just to the max (0xff? or actually 0x3fff?), need to take z clear
			   value into account? */
		{
			BEGIN_RING(4);
			OUT_RING(CP_PACKET3(RADEON_3D_CLEAR_HIZ, 2));
			OUT_RING(0x0);	/* First tile */
			OUT_RING(0x3cc0);
			OUT_RING((0xff << 22) | (0xff << 6) | 0x003f003f);
			ADVANCE_RING();
		}
	}

	/* We have to clear the depth and/or stencil buffers by
	 * rendering a quad into just those buffers.  Thus, we have to
	 * make sure the 3D engine is configured correctly.
	 */
	else if ((dev_priv->microcode_version == UCODE_R200) &&
		 (flags & (RADEON_DEPTH | RADEON_STENCIL))) {

		int tempPP_CNTL;
		int tempRE_CNTL;
		int tempRB3D_CNTL;
		int tempRB3D_ZSTENCILCNTL;
		int tempRB3D_STENCILREFMASK;
		int tempRB3D_PLANEMASK;
		int tempSE_CNTL;
		int tempSE_VTE_CNTL;
		int tempSE_VTX_FMT_0;
		int tempSE_VTX_FMT_1;
		int tempSE_VAP_CNTL;
		int tempRE_AUX_SCISSOR_CNTL;

		tempPP_CNTL = 0;
		tempRE_CNTL = 0;

		tempRB3D_CNTL = depth_clear->rb3d_cntl;

		tempRB3D_ZSTENCILCNTL = depth_clear->rb3d_zstencilcntl;
		tempRB3D_STENCILREFMASK = 0x0;

		tempSE_CNTL = depth_clear->se_cntl;

		/* Disable TCL */

		tempSE_VAP_CNTL = (	/* SE_VAP_CNTL__FORCE_W_TO_ONE_MASK | */
					  (0x9 <<
					   SE_VAP_CNTL__VF_MAX_VTX_NUM__SHIFT));

		tempRB3D_PLANEMASK = 0x0;

		tempRE_AUX_SCISSOR_CNTL = 0x0;

		tempSE_VTE_CNTL =
		    SE_VTE_CNTL__VTX_XY_FMT_MASK | SE_VTE_CNTL__VTX_Z_FMT_MASK;

		/* Vertex format (X, Y, Z, W) */
		tempSE_VTX_FMT_0 =
		    SE_VTX_FMT_0__VTX_Z0_PRESENT_MASK |
		    SE_VTX_FMT_0__VTX_W0_PRESENT_MASK;
		tempSE_VTX_FMT_1 = 0x0;

		/*
		 * Depth buffer specific enables
		 */
		if (flags & RADEON_DEPTH) {
			/* Enable depth buffer */
			tempRB3D_CNTL |= RADEON_Z_ENABLE;
		} else {
			/* Disable depth buffer */
			tempRB3D_CNTL &= ~RADEON_Z_ENABLE;
		}

		/*
		 * Stencil buffer specific enables
		 */
		if (flags & RADEON_STENCIL) {
			tempRB3D_CNTL |= RADEON_STENCIL_ENABLE;
			tempRB3D_STENCILREFMASK = clear->depth_mask;
		} else {
			tempRB3D_CNTL &= ~RADEON_STENCIL_ENABLE;
			tempRB3D_STENCILREFMASK = 0x00000000;
		}

		if (flags & RADEON_USE_COMP_ZBUF) {
			tempRB3D_ZSTENCILCNTL |= RADEON_Z_COMPRESSION_ENABLE |
			    RADEON_Z_DECOMPRESSION_ENABLE;
		}
		if (flags & RADEON_USE_HIERZ) {
			tempRB3D_ZSTENCILCNTL |= RADEON_Z_HIERARCHY_ENABLE;
		}

		BEGIN_RING(26);
		RADEON_WAIT_UNTIL_2D_IDLE();

		OUT_RING_REG(RADEON_PP_CNTL, tempPP_CNTL);
		OUT_RING_REG(R200_RE_CNTL, tempRE_CNTL);
		OUT_RING_REG(RADEON_RB3D_CNTL, tempRB3D_CNTL);
		OUT_RING_REG(RADEON_RB3D_ZSTENCILCNTL, tempRB3D_ZSTENCILCNTL);
		OUT_RING_REG(RADEON_RB3D_STENCILREFMASK,
			     tempRB3D_STENCILREFMASK);
		OUT_RING_REG(RADEON_RB3D_PLANEMASK, tempRB3D_PLANEMASK);
		OUT_RING_REG(RADEON_SE_CNTL, tempSE_CNTL);
		OUT_RING_REG(R200_SE_VTE_CNTL, tempSE_VTE_CNTL);
		OUT_RING_REG(R200_SE_VTX_FMT_0, tempSE_VTX_FMT_0);
		OUT_RING_REG(R200_SE_VTX_FMT_1, tempSE_VTX_FMT_1);
		OUT_RING_REG(R200_SE_VAP_CNTL, tempSE_VAP_CNTL);
		OUT_RING_REG(R200_RE_AUX_SCISSOR_CNTL, tempRE_AUX_SCISSOR_CNTL);
		ADVANCE_RING();

		/* Make sure we restore the 3D state next time.
		 */
		dev_priv->sarea_priv->ctx_owner = 0;

		for (i = 0; i < nbox; i++) {

			/* Funny that this should be required --
			 *  sets top-left?
			 */
			radeon_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);

			BEGIN_RING(14);
			OUT_RING(CP_PACKET3(R200_3D_DRAW_IMMD_2, 12));
			OUT_RING((RADEON_PRIM_TYPE_RECT_LIST |
				  RADEON_PRIM_WALK_RING |
				  (3 << RADEON_NUM_VERTICES_SHIFT)));
			OUT_RING(depth_boxes[i].ui[CLEAR_X1]);
			OUT_RING(depth_boxes[i].ui[CLEAR_Y1]);
			OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
			OUT_RING(0x3f800000);
			OUT_RING(depth_boxes[i].ui[CLEAR_X1]);
			OUT_RING(depth_boxes[i].ui[CLEAR_Y2]);
			OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
			OUT_RING(0x3f800000);
			OUT_RING(depth_boxes[i].ui[CLEAR_X2]);
			OUT_RING(depth_boxes[i].ui[CLEAR_Y2]);
			OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
			OUT_RING(0x3f800000);
			ADVANCE_RING();
		}
	} else if ((flags & (RADEON_DEPTH | RADEON_STENCIL))) {

		int tempRB3D_ZSTENCILCNTL = depth_clear->rb3d_zstencilcntl;

		rb3d_cntl = depth_clear->rb3d_cntl;

		if (flags & RADEON_DEPTH) {
			rb3d_cntl |= RADEON_Z_ENABLE;
		} else {
			rb3d_cntl &= ~RADEON_Z_ENABLE;
		}

		if (flags & RADEON_STENCIL) {
			rb3d_cntl |= RADEON_STENCIL_ENABLE;
			rb3d_stencilrefmask = clear->depth_mask;	/* misnamed field */
		} else {
			rb3d_cntl &= ~RADEON_STENCIL_ENABLE;
			rb3d_stencilrefmask = 0x00000000;
		}

		if (flags & RADEON_USE_COMP_ZBUF) {
			tempRB3D_ZSTENCILCNTL |= RADEON_Z_COMPRESSION_ENABLE |
			    RADEON_Z_DECOMPRESSION_ENABLE;
		}
		if (flags & RADEON_USE_HIERZ) {
			tempRB3D_ZSTENCILCNTL |= RADEON_Z_HIERARCHY_ENABLE;
		}

		BEGIN_RING(13);
		RADEON_WAIT_UNTIL_2D_IDLE();

		OUT_RING(CP_PACKET0(RADEON_PP_CNTL, 1));
		OUT_RING(0x00000000);
		OUT_RING(rb3d_cntl);

		OUT_RING_REG(RADEON_RB3D_ZSTENCILCNTL, tempRB3D_ZSTENCILCNTL);
		OUT_RING_REG(RADEON_RB3D_STENCILREFMASK, rb3d_stencilrefmask);
		OUT_RING_REG(RADEON_RB3D_PLANEMASK, 0x00000000);
		OUT_RING_REG(RADEON_SE_CNTL, depth_clear->se_cntl);
		ADVANCE_RING();

		/* Make sure we restore the 3D state next time.
		 */
		dev_priv->sarea_priv->ctx_owner = 0;

		for (i = 0; i < nbox; i++) {

			/* Funny that this should be required --
			 *  sets top-left?
			 */
			radeon_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);

			BEGIN_RING(15);

			OUT_RING(CP_PACKET3(RADEON_3D_DRAW_IMMD, 13));
			OUT_RING(RADEON_VTX_Z_PRESENT |
				 RADEON_VTX_PKCOLOR_PRESENT);
			OUT_RING((RADEON_PRIM_TYPE_RECT_LIST |
				  RADEON_PRIM_WALK_RING |
				  RADEON_MAOS_ENABLE |
				  RADEON_VTX_FMT_RADEON_MODE |
				  (3 << RADEON_NUM_VERTICES_SHIFT)));

			OUT_RING(depth_boxes[i].ui[CLEAR_X1]);
			OUT_RING(depth_boxes[i].ui[CLEAR_Y1]);
			OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
			OUT_RING(0x0);

			OUT_RING(depth_boxes[i].ui[CLEAR_X1]);
			OUT_RING(depth_boxes[i].ui[CLEAR_Y2]);
			OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
			OUT_RING(0x0);

			OUT_RING(depth_boxes[i].ui[CLEAR_X2]);
			OUT_RING(depth_boxes[i].ui[CLEAR_Y2]);
			OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
			OUT_RING(0x0);

			ADVANCE_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001321 }
1322 }
1323
1324 /* Increment the clear counter. The client-side 3D driver must
1325 * wait on this value before performing the clear ioctl. We
1326 * need this because the card's so damned fast...
1327 */
1328 dev_priv->sarea_priv->last_clear++;
1329
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001330 BEGIN_RING(4);
Kevin E Martin0994e632001-01-05 22:57:55 +00001331
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001332 RADEON_CLEAR_AGE(dev_priv->sarea_priv->last_clear);
Kevin E Martin0994e632001-01-05 22:57:55 +00001333 RADEON_WAIT_UNTIL_IDLE();
1334
1335 ADVANCE_RING();
1336}
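
/* Illustration only (not driver code): a client throttling on the clear
 * counter emitted above would do roughly
 *
 *	while (read_clear_age_scratch_reg() < sarea_priv->last_clear)
 *		;	(spin or yield until the CP catches up)
 *
 * where read_clear_age_scratch_reg() is a hypothetical stand-in for
 * however the client reads back the scratch register that
 * RADEON_CLEAR_AGE() targets.
 */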

static void radeon_cp_dispatch_swap(drm_device_t * dev)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* Do some trivial performance monitoring...
	 */
	if (dev_priv->do_boxes)
		radeon_cp_performance_boxes(dev_priv);

	/* Wait for the 3D stream to idle before dispatching the bitblt.
	 * This will prevent data corruption between the two streams.
	 */
	BEGIN_RING(2);

	RADEON_WAIT_UNTIL_3D_IDLE();

	ADVANCE_RING();

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG("dispatch swap %d,%d-%d,%d\n", x, y, w, h);

		BEGIN_RING(9);

		OUT_RING(CP_PACKET0(RADEON_DP_GUI_MASTER_CNTL, 0));
		OUT_RING(RADEON_GMC_SRC_PITCH_OFFSET_CNTL |
			 RADEON_GMC_DST_PITCH_OFFSET_CNTL |
			 RADEON_GMC_BRUSH_NONE |
			 (dev_priv->color_fmt << 8) |
			 RADEON_GMC_SRC_DATATYPE_COLOR |
			 RADEON_ROP3_S |
			 RADEON_DP_SRC_SOURCE_MEMORY |
			 RADEON_GMC_CLR_CMP_CNTL_DIS | RADEON_GMC_WR_MSK_DIS);

		/* Make this work even if front & back are flipped:
		 */
		OUT_RING(CP_PACKET0(RADEON_SRC_PITCH_OFFSET, 1));
		if (dev_priv->current_page == 0) {
			OUT_RING(dev_priv->back_pitch_offset);
			OUT_RING(dev_priv->front_pitch_offset);
		} else {
			OUT_RING(dev_priv->front_pitch_offset);
			OUT_RING(dev_priv->back_pitch_offset);
		}
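		/* radeon_cp_dispatch_flip() toggles current_page, so the
		 * source/destination pair above follows whichever buffer is
		 * currently the "back" one, preserving back-to-front blit
		 * semantics while page flipping is active.
		 */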

		OUT_RING(CP_PACKET0(RADEON_SRC_X_Y, 2));
		OUT_RING((x << 16) | y);
		OUT_RING((x << 16) | y);
		OUT_RING((w << 16) | h);

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING(4);

	RADEON_FRAME_AGE(dev_priv->sarea_priv->last_frame);
	RADEON_WAIT_UNTIL_2D_IDLE();

	ADVANCE_RING();
}

static void radeon_cp_dispatch_flip(drm_device_t * dev)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_sarea_t *sarea = (drm_sarea_t *) dev_priv->sarea->handle;
	int offset = (dev_priv->current_page == 1)
	    ? dev_priv->front_offset : dev_priv->back_offset;
	RING_LOCALS;
	DRM_DEBUG("%s: page=%d pfCurrentPage=%d\n",
		  __FUNCTION__,
		  dev_priv->current_page, dev_priv->sarea_priv->pfCurrentPage);

	/* Do some trivial performance monitoring...
	 */
	if (dev_priv->do_boxes) {
		dev_priv->stats.boxes |= RADEON_BOX_FLIP;
		radeon_cp_performance_boxes(dev_priv);
	}

	/* Update the frame offsets for both CRTCs
	 */
	BEGIN_RING(6);

	RADEON_WAIT_UNTIL_3D_IDLE();
	OUT_RING_REG(RADEON_CRTC_OFFSET,
		     ((sarea->frame.y * dev_priv->front_pitch +
		       sarea->frame.x * (dev_priv->color_fmt - 2)) & ~7)
		     + offset);
	OUT_RING_REG(RADEON_CRTC2_OFFSET, dev_priv->sarea_priv->crtc2_base
		     + offset);
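	/* A note on the math above: for the color formats used here,
	 * (color_fmt - 2) happens to equal the pixel size in bytes
	 * (RADEON_COLOR_FORMAT_RGB565 is 4, giving 2; ARGB8888 is 6,
	 * giving 4), so the expression is just y * pitch + x * cpp,
	 * masked down to the 8-byte alignment the CRTC base requires.
	 */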

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page =
	    1 - dev_priv->current_page;

	BEGIN_RING(2);

	RADEON_FRAME_AGE(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
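
/* pfCurrentPage is kept in the sarea shared with the DDX, so the 2D
 * driver always agrees with us on which buffer is being scanned out
 * once the flip above has been queued.
 */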

static int bad_prim_vertex_nr(int primitive, int nr)
{
	switch (primitive & RADEON_PRIM_TYPE_MASK) {
	case RADEON_PRIM_TYPE_NONE:
	case RADEON_PRIM_TYPE_POINT:
		return nr < 1;
	case RADEON_PRIM_TYPE_LINE:
		return (nr & 1) || nr == 0;
	case RADEON_PRIM_TYPE_LINE_STRIP:
		return nr < 2;
	case RADEON_PRIM_TYPE_TRI_LIST:
	case RADEON_PRIM_TYPE_3VRT_POINT_LIST:
	case RADEON_PRIM_TYPE_3VRT_LINE_LIST:
	case RADEON_PRIM_TYPE_RECT_LIST:
		return nr % 3 || nr == 0;
	case RADEON_PRIM_TYPE_TRI_FAN:
	case RADEON_PRIM_TYPE_TRI_STRIP:
		return nr < 3;
	default:
		return 1;
	}
}
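
/* A non-zero return means "reject the primitive": e.g. a TRI_LIST or
 * RECT_LIST must supply a positive multiple of three vertices, a LINE
 * list a positive even count, and strips/fans at least enough vertices
 * for a single primitive.
 */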

typedef struct {
	unsigned int start;
	unsigned int finish;
	unsigned int prim;
	unsigned int numverts;
	unsigned int offset;
	unsigned int vc_format;
} drm_radeon_tcl_prim_t;
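
/* start/finish are byte offsets into the client's DMA buffer, numverts
 * the client-supplied vertex count and vc_format the vertex layout
 * word; offset is only used by the indexed path, where it locates the
 * vertex array that the indices refer to.
 */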

static void radeon_cp_dispatch_vertex(drm_device_t * dev,
				      drm_buf_t * buf,
				      drm_radeon_tcl_prim_t * prim)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int offset = dev_priv->gart_buffers_offset + buf->offset + prim->start;
	int numverts = (int)prim->numverts;
	int nbox = sarea_priv->nbox;
	int i = 0;
	RING_LOCALS;

	DRM_DEBUG("hwprim 0x%x vfmt 0x%x %d..%d %d verts\n",
		  prim->prim,
		  prim->vc_format, prim->start, prim->finish, prim->numverts);

	if (bad_prim_vertex_nr(prim->prim, prim->numverts)) {
		DRM_ERROR("bad prim %x numverts %d\n",
			  prim->prim, prim->numverts);
		return;
	}

	do {
		/* Emit the next cliprect */
		if (i < nbox) {
			radeon_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);
		}

		/* Emit the vertex buffer rendering commands */
		BEGIN_RING(5);

		OUT_RING(CP_PACKET3(RADEON_3D_RNDR_GEN_INDX_PRIM, 3));
		OUT_RING(offset);
		OUT_RING(numverts);
		OUT_RING(prim->vc_format);
		OUT_RING(prim->prim | RADEON_PRIM_WALK_LIST |
			 RADEON_COLOR_ORDER_RGBA |
			 RADEON_VTX_FMT_RADEON_MODE |
			 (numverts << RADEON_NUM_VERTICES_SHIFT));

		ADVANCE_RING();

		i++;
	} while (i < nbox);
}

static void radeon_cp_discard_buffer(drm_device_t * dev, drm_buf_t * buf)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_buf_priv_t *buf_priv = buf->dev_private;
	RING_LOCALS;

	buf_priv->age = ++dev_priv->sarea_priv->last_dispatch;

	/* Emit the vertex buffer age */
	BEGIN_RING(2);
	RADEON_DISPATCH_AGE(buf_priv->age);
	ADVANCE_RING();

	buf->pending = 1;
	buf->used = 0;
}
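
/* Discarding does not free the buffer immediately: radeon_freelist_get()
 * only hands it out again once the CP has written an age >= buf_priv->age
 * back through RADEON_DISPATCH_AGE(), i.e. once the hardware is provably
 * done reading from it.
 */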

static void radeon_cp_dispatch_indirect(drm_device_t * dev,
					drm_buf_t * buf, int start, int end)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;
	DRM_DEBUG("indirect: buf=%d s=0x%x e=0x%x\n", buf->idx, start, end);

	if (start != end) {
		int offset = (dev_priv->gart_buffers_offset
			      + buf->offset + start);
		int dwords = (end - start + 3) / sizeof(u32);

		/* Indirect buffer data must be an even number of
		 * dwords, so if we've been given an odd number we must
		 * pad the data with a Type-2 CP packet.
		 */
		if (dwords & 1) {
			u32 *data = (u32 *)
			    ((char *)dev->agp_buffer_map->handle
			     + buf->offset + start);
			data[dwords++] = RADEON_CP_PACKET2;
		}

		/* Fire off the indirect buffer */
		BEGIN_RING(3);

		OUT_RING(CP_PACKET0(RADEON_CP_IB_BASE, 1));
		OUT_RING(offset);
		OUT_RING(dwords);

		ADVANCE_RING();
	}
}
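
/* Worked example of the padding rule above: start = 0, end = 10 covers
 * ten bytes, so dwords = (10 + 3) / 4 = 3; that is odd, so a single
 * RADEON_CP_PACKET2 nop is appended and the CP is asked to fetch an
 * even 4 dwords.
 */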

static void radeon_cp_dispatch_indices(drm_device_t * dev,
				       drm_buf_t * elt_buf,
				       drm_radeon_tcl_prim_t * prim)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int offset = dev_priv->gart_buffers_offset + prim->offset;
	u32 *data;
	int dwords;
	int i = 0;
	int start = prim->start + RADEON_INDEX_PRIM_OFFSET;
	int count = (prim->finish - start) / sizeof(u16);
	int nbox = sarea_priv->nbox;

	DRM_DEBUG("hwprim 0x%x vfmt 0x%x %d..%d offset: %x nr %d\n",
		  prim->prim,
		  prim->vc_format,
		  prim->start, prim->finish, prim->offset, prim->numverts);

	if (bad_prim_vertex_nr(prim->prim, count)) {
		DRM_ERROR("bad prim %x count %d\n", prim->prim, count);
		return;
	}

	if (start >= prim->finish || (prim->start & 0x7)) {
		DRM_ERROR("buffer prim %d\n", prim->prim);
		return;
	}

	dwords = (prim->finish - prim->start + 3) / sizeof(u32);

	data = (u32 *) ((char *)dev->agp_buffer_map->handle +
			elt_buf->offset + prim->start);

	data[0] = CP_PACKET3(RADEON_3D_RNDR_GEN_INDX_PRIM, dwords - 2);
	data[1] = offset;
	data[2] = prim->numverts;
	data[3] = prim->vc_format;
	data[4] = (prim->prim |
		   RADEON_PRIM_WALK_IND |
		   RADEON_COLOR_ORDER_RGBA |
		   RADEON_VTX_FMT_RADEON_MODE |
		   (count << RADEON_NUM_VERTICES_SHIFT));

	do {
		if (i < nbox)
			radeon_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);

		radeon_cp_dispatch_indirect(dev, elt_buf,
					    prim->start, prim->finish);

		i++;
	} while (i < nbox);

}
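
/* Unlike the vertex path, nothing but cliprects is written to the main
 * ring here: the 3D_RNDR_GEN_INDX_PRIM header is patched into
 * data[0..4] of the client's own buffer, and that range is then
 * replayed once per cliprect via radeon_cp_dispatch_indirect().
 */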

#define RADEON_MAX_TEXTURE_SIZE RADEON_BUFFER_SIZE

static int radeon_cp_dispatch_texture(DRMFILE filp,
				      drm_device_t * dev,
				      drm_radeon_texture_t * tex,
				      drm_radeon_tex_image_t * image)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_file_t *filp_priv;
	drm_buf_t *buf;
	u32 format;
	u32 *buffer;
	const u8 __user *data;
	int size, dwords, tex_width, blit_width, spitch;
	u32 height;
	int i;
	u32 texpitch, microtile;
	u32 offset;
	RING_LOCALS;

	DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);

	if (radeon_check_and_fixup_offset(dev_priv, filp_priv, &tex->offset)) {
		DRM_ERROR("Invalid destination offset\n");
		return DRM_ERR(EINVAL);
	}

	dev_priv->stats.boxes |= RADEON_BOX_TEXTURE_LOAD;

	/* Flush the pixel cache.  This ensures no pixel data gets mixed
	 * up with the texture data from the host data blit, otherwise
	 * part of the texture image may be corrupted.
	 */
	BEGIN_RING(4);
	RADEON_FLUSH_CACHE();
	RADEON_WAIT_UNTIL_IDLE();
	ADVANCE_RING();

	/* The compiler won't optimize away a division by a variable,
	 * even if the only legal values are powers of two.  Thus, we'll
	 * use a shift instead.
	 */
	switch (tex->format) {
	case RADEON_TXFORMAT_ARGB8888:
	case RADEON_TXFORMAT_RGBA8888:
		format = RADEON_COLOR_FORMAT_ARGB8888;
		tex_width = tex->width * 4;
		blit_width = image->width * 4;
		break;
	case RADEON_TXFORMAT_AI88:
	case RADEON_TXFORMAT_ARGB1555:
	case RADEON_TXFORMAT_RGB565:
	case RADEON_TXFORMAT_ARGB4444:
	case RADEON_TXFORMAT_VYUY422:
	case RADEON_TXFORMAT_YVYU422:
		format = RADEON_COLOR_FORMAT_RGB565;
		tex_width = tex->width * 2;
		blit_width = image->width * 2;
		break;
	case RADEON_TXFORMAT_I8:
	case RADEON_TXFORMAT_RGB332:
		format = RADEON_COLOR_FORMAT_CI8;
		tex_width = tex->width * 1;
		blit_width = image->width * 1;
		break;
	default:
		DRM_ERROR("invalid texture format %d\n", tex->format);
		return DRM_ERR(EINVAL);
	}
	spitch = blit_width >> 6;
	if (spitch == 0 && image->height > 1)
		return DRM_ERR(EINVAL);

	texpitch = tex->pitch;
	if ((texpitch << 22) & RADEON_DST_TILE_MICRO) {
		microtile = 1;
		if (tex_width < 64) {
			texpitch &= ~(RADEON_DST_TILE_MICRO >> 22);
			/* we got tiled coordinates, untile them */
			image->x *= 2;
		}
	} else
		microtile = 0;

	DRM_DEBUG("tex=%dx%d blit=%d\n", tex_width, tex->height, blit_width);

	do {
		DRM_DEBUG("tex: ofs=0x%x p=%d f=%d x=%hd y=%hd w=%hd h=%hd\n",
			  tex->offset >> 10, tex->pitch, tex->format,
			  image->x, image->y, image->width, image->height);

		/* Make a copy of some parameters in case we have to
		 * update them for a multi-pass texture blit.
		 */
		height = image->height;
		data = (const u8 __user *)image->data;

		size = height * blit_width;

		if (size > RADEON_MAX_TEXTURE_SIZE) {
			height = RADEON_MAX_TEXTURE_SIZE / blit_width;
			size = height * blit_width;
		} else if (size < 4 && size > 0) {
			size = 4;
		} else if (size == 0) {
			return 0;
		}

		buf = radeon_freelist_get(dev);
		if (0 && !buf) {
			radeon_do_cp_idle(dev_priv);
			buf = radeon_freelist_get(dev);
		}
		if (!buf) {
			DRM_DEBUG("radeon_cp_dispatch_texture: EAGAIN\n");
			if (DRM_COPY_TO_USER(tex->image, image, sizeof(*image)))
				return DRM_ERR(EFAULT);
			return DRM_ERR(EAGAIN);
		}

		/* Dispatch the indirect buffer.
		 */
		buffer =
		    (u32 *) ((char *)dev->agp_buffer_map->handle + buf->offset);
		dwords = size / 4;

#define RADEON_COPY_MT(_buf, _data, _width) \
	do { \
		if (DRM_COPY_FROM_USER(_buf, _data, (_width))) {\
			DRM_ERROR("EFAULT on pad, %d bytes\n", (_width)); \
			return DRM_ERR(EFAULT); \
		} \
	} while(0)
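		/* The do { ... } while(0) wrapper makes the macro expand to a
		 * single statement, so it is safe in unbraced if/else arms,
		 * e.g.:
		 *
		 *	if (microtile)
		 *		RADEON_COPY_MT(buffer, data, 16);
		 *
		 * Note that the "return DRM_ERR(EFAULT)" inside it returns
		 * from radeon_cp_dispatch_texture() itself.
		 */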

		if (microtile) {
			/* texture micro tiling in use, minimum texture width is thus 16 bytes.
			   however, we cannot use blitter directly for texture width < 64 bytes,
			   since minimum tex pitch is 64 bytes and we need this to match
			   the texture width, otherwise the blitter will tile it wrong.
			   Thus, tiling manually in this case. Additionally, need to special
			   case tex height = 1, since our actual image will have height 2
			   and we need to ensure we don't read beyond the texture size
			   from user space. */
			if (tex->height == 1) {
				if (tex_width >= 64 || tex_width <= 16) {
					RADEON_COPY_MT(buffer, data,
						       (int)(tex_width * sizeof(u32)));
				} else if (tex_width == 32) {
					RADEON_COPY_MT(buffer, data, 16);
					RADEON_COPY_MT(buffer + 8,
						       data + 16, 16);
				}
			} else if (tex_width >= 64 || tex_width == 16) {
				RADEON_COPY_MT(buffer, data,
					       (int)(dwords * sizeof(u32)));
			} else if (tex_width < 16) {
				for (i = 0; i < tex->height; i++) {
					RADEON_COPY_MT(buffer, data, tex_width);
					buffer += 4;
					data += tex_width;
				}
			} else if (tex_width == 32) {
				/* TODO: make sure this works when not fitting in one buffer
				   (i.e. 32bytes x 2048...) */
				for (i = 0; i < tex->height; i += 2) {
					RADEON_COPY_MT(buffer, data, 16);
					data += 16;
					RADEON_COPY_MT(buffer + 8, data, 16);
					data += 16;
					RADEON_COPY_MT(buffer + 4, data, 16);
					data += 16;
					RADEON_COPY_MT(buffer + 12, data, 16);
					data += 16;
					buffer += 16;
				}
			}
		} else {
			if (tex_width >= 32) {
				/* Texture image width is larger than the minimum, so we
				 * can upload it directly.
				 */
				RADEON_COPY_MT(buffer, data,
					       (int)(dwords * sizeof(u32)));
			} else {
				/* Texture image width is less than the minimum, so we
				 * need to pad out each image scanline to the minimum
				 * width.
				 */
				for (i = 0; i < tex->height; i++) {
					RADEON_COPY_MT(buffer, data, tex_width);
					buffer += 8;
					data += tex_width;
				}
			}
		}

#undef RADEON_COPY_MT
		buf->filp = filp;
		buf->used = size;
		offset = dev_priv->gart_buffers_offset + buf->offset;
		BEGIN_RING(9);
		OUT_RING(CP_PACKET3(RADEON_CNTL_BITBLT_MULTI, 5));
		OUT_RING(RADEON_GMC_SRC_PITCH_OFFSET_CNTL |
			 RADEON_GMC_DST_PITCH_OFFSET_CNTL |
			 RADEON_GMC_BRUSH_NONE |
			 (format << 8) |
			 RADEON_GMC_SRC_DATATYPE_COLOR |
			 RADEON_ROP3_S |
			 RADEON_DP_SRC_SOURCE_MEMORY |
			 RADEON_GMC_CLR_CMP_CNTL_DIS | RADEON_GMC_WR_MSK_DIS);
		OUT_RING((spitch << 22) | (offset >> 10));
		OUT_RING((texpitch << 22) | (tex->offset >> 10));
		OUT_RING(0);
		OUT_RING((image->x << 16) | image->y);
		OUT_RING((image->width << 16) | height);
		RADEON_WAIT_UNTIL_2D_IDLE();
		ADVANCE_RING();

		radeon_cp_discard_buffer(dev, buf);

		/* Update the input parameters for next time */
		image->y += height;
		image->height -= height;
		image->data = (const u8 __user *)image->data + size;
	} while (image->height > 0);

	/* Flush the pixel cache after the blit completes.  This ensures
	 * the texture data is written out to memory before rendering
	 * continues.
	 */
	BEGIN_RING(4);
	RADEON_FLUSH_CACHE();
	RADEON_WAIT_UNTIL_2D_IDLE();
	ADVANCE_RING();
	return 0;
}

static void radeon_cp_dispatch_stipple(drm_device_t * dev, u32 * stipple)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	BEGIN_RING(35);

	OUT_RING(CP_PACKET0(RADEON_RE_STIPPLE_ADDR, 0));
	OUT_RING(0x00000000);

	OUT_RING(CP_PACKET0_TABLE(RADEON_RE_STIPPLE_DATA, 31));
	for (i = 0; i < 32; i++) {
		OUT_RING(stipple[i]);
	}

	ADVANCE_RING();
}
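
/* The 32 dwords streamed above form the 32x32 bit monochrome polygon
 * stipple pattern, one dword per row; RADEON_RE_STIPPLE_ADDR is zeroed
 * first since the data port auto-increments the row index from there.
 */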

static void radeon_apply_surface_regs(int surf_index,
				      drm_radeon_private_t *dev_priv)
{
	if (!dev_priv->mmio)
		return;

	radeon_do_cp_idle(dev_priv);

	RADEON_WRITE(RADEON_SURFACE0_INFO + 16 * surf_index,
		     dev_priv->surfaces[surf_index].flags);
	RADEON_WRITE(RADEON_SURFACE0_LOWER_BOUND + 16 * surf_index,
		     dev_priv->surfaces[surf_index].lower);
	RADEON_WRITE(RADEON_SURFACE0_UPPER_BOUND + 16 * surf_index,
		     dev_priv->surfaces[surf_index].upper);
}
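
/* Each surface slot is a 16-byte-spaced register triplet (INFO,
 * LOWER_BOUND, UPPER_BOUND), hence the 16 * surf_index stride; the CP
 * is idled first so that no in-flight operation can observe a
 * half-updated surface definition.
 */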

/* Allocates a virtual surface
 * doesn't always allocate a real surface, will stretch an existing
 * surface when possible.
 *
 * Note that refcount can be at most 2, since during a free refcount=3
 * might mean we have to allocate a new surface which might not always
 * be available.
 * For example: we allocate three contiguous surfaces ABC. If B is
 * freed, we suddenly need two surfaces to store A and C, which might
 * not always be available.
 */
static int alloc_surface(drm_radeon_surface_alloc_t *new,
			 drm_radeon_private_t *dev_priv, DRMFILE filp)
{
	struct radeon_virt_surface *s;
	int i;
	int virt_surface_index;
	uint32_t new_upper, new_lower;

	new_lower = new->address;
	new_upper = new_lower + new->size - 1;

	/* sanity check */
	if ((new_lower >= new_upper) || (new->flags == 0) || (new->size == 0) ||
	    ((new_upper & RADEON_SURF_ADDRESS_FIXED_MASK) !=
	     RADEON_SURF_ADDRESS_FIXED_MASK)
	    || ((new_lower & RADEON_SURF_ADDRESS_FIXED_MASK) != 0))
		return -1;

	/* make sure there is no overlap with existing surfaces */
	for (i = 0; i < RADEON_MAX_SURFACES; i++) {
		if ((dev_priv->surfaces[i].refcount != 0) &&
		    (((new_lower >= dev_priv->surfaces[i].lower) &&
		      (new_lower < dev_priv->surfaces[i].upper)) ||
		     ((new_lower < dev_priv->surfaces[i].lower) &&
		      (new_upper > dev_priv->surfaces[i].lower)))) {
			return -1;
		}
	}

	/* find a virtual surface */
	for (i = 0; i < 2 * RADEON_MAX_SURFACES; i++)
		if (dev_priv->virt_surfaces[i].filp == 0)
			break;
	if (i == 2 * RADEON_MAX_SURFACES) {
		return -1;
	}
	virt_surface_index = i;

	/* try to reuse an existing surface */
	for (i = 0; i < RADEON_MAX_SURFACES; i++) {
		/* extend before */
		if ((dev_priv->surfaces[i].refcount == 1) &&
		    (new->flags == dev_priv->surfaces[i].flags) &&
		    (new_upper + 1 == dev_priv->surfaces[i].lower)) {
			s = &(dev_priv->virt_surfaces[virt_surface_index]);
			s->surface_index = i;
			s->lower = new_lower;
			s->upper = new_upper;
			s->flags = new->flags;
			s->filp = filp;
			dev_priv->surfaces[i].refcount++;
			dev_priv->surfaces[i].lower = s->lower;
			radeon_apply_surface_regs(s->surface_index, dev_priv);
			return virt_surface_index;
		}

		/* extend after */
		if ((dev_priv->surfaces[i].refcount == 1) &&
		    (new->flags == dev_priv->surfaces[i].flags) &&
		    (new_lower == dev_priv->surfaces[i].upper + 1)) {
			s = &(dev_priv->virt_surfaces[virt_surface_index]);
			s->surface_index = i;
			s->lower = new_lower;
			s->upper = new_upper;
			s->flags = new->flags;
			s->filp = filp;
			dev_priv->surfaces[i].refcount++;
			dev_priv->surfaces[i].upper = s->upper;
			radeon_apply_surface_regs(s->surface_index, dev_priv);
			return virt_surface_index;
		}
	}

	/* okay, we need a new one */
	for (i = 0; i < RADEON_MAX_SURFACES; i++) {
		if (dev_priv->surfaces[i].refcount == 0) {
			s = &(dev_priv->virt_surfaces[virt_surface_index]);
			s->surface_index = i;
			s->lower = new_lower;
			s->upper = new_upper;
			s->flags = new->flags;
			s->filp = filp;
			dev_priv->surfaces[i].refcount = 1;
			dev_priv->surfaces[i].lower = s->lower;
			dev_priv->surfaces[i].upper = s->upper;
			dev_priv->surfaces[i].flags = s->flags;
			radeon_apply_surface_regs(s->surface_index, dev_priv);
			return virt_surface_index;
		}
	}

	/* we didn't find anything */
	return -1;
}
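
/* Example of the reuse paths above: if a physical surface with matching
 * flags and refcount 1 already covers [lower, upper] and a request
 * arrives for the range starting at upper + 1, the "extend after" case
 * grows that surface and bumps its refcount to 2, so no second hardware
 * slot is consumed.
 */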

static int free_surface(DRMFILE filp, drm_radeon_private_t * dev_priv,
			int lower)
{
	struct radeon_virt_surface *s;
	int i;
	/* find the virtual surface */
	for (i = 0; i < 2 * RADEON_MAX_SURFACES; i++) {
		s = &(dev_priv->virt_surfaces[i]);
		if (s->filp) {
			if ((lower == s->lower) && (filp == s->filp)) {
				if (dev_priv->surfaces[s->surface_index].
				    lower == s->lower)
					dev_priv->surfaces[s->surface_index].
					    lower = s->upper;

				if (dev_priv->surfaces[s->surface_index].
				    upper == s->upper)
					dev_priv->surfaces[s->surface_index].
					    upper = s->lower;

				dev_priv->surfaces[s->surface_index].refcount--;
				if (dev_priv->surfaces[s->surface_index].
				    refcount == 0)
					dev_priv->surfaces[s->surface_index].
					    flags = 0;
				s->filp = NULL;
				radeon_apply_surface_regs(s->surface_index,
							  dev_priv);
				return 0;
			}
		}
	}
	return 1;
}
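
/* Shrinking only works from the ends: freeing a virtual surface in the
 * middle of a merged range matches neither bounds test above, so the
 * physical surface keeps covering the freed span until its refcount
 * finally drops to zero.
 */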

static void radeon_surfaces_release(DRMFILE filp,
				    drm_radeon_private_t * dev_priv)
{
	int i;
	for (i = 0; i < 2 * RADEON_MAX_SURFACES; i++) {
		if (dev_priv->virt_surfaces[i].filp == filp)
			free_surface(filp, dev_priv,
				     dev_priv->virt_surfaces[i].lower);
	}
}

/* ================================================================
 * IOCTL functions
 */
static int radeon_surface_alloc(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_surface_alloc_t alloc;

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(alloc,
				 (drm_radeon_surface_alloc_t __user *) data,
				 sizeof(alloc));

	if (alloc_surface(&alloc, dev_priv, filp) == -1)
		return DRM_ERR(EINVAL);
	else
		return 0;
}

static int radeon_surface_free(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_surface_free_t memfree;

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(memfree, (drm_radeon_surface_free_t __user *) data,
				 sizeof(memfree));

	if (free_surface(filp, dev_priv, memfree.address))
		return DRM_ERR(EINVAL);
	else
		return 0;
}

static int radeon_cp_clear(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_clear_t clear;
	drm_radeon_clear_rect_t depth_boxes[RADEON_NR_SAREA_CLIPRECTS];
	DRM_DEBUG("\n");

	LOCK_TEST_WITH_RETURN(dev, filp);

	DRM_COPY_FROM_USER_IOCTL(clear, (drm_radeon_clear_t __user *) data,
				 sizeof(clear));

	RING_SPACE_TEST_WITH_RETURN(dev_priv);

	if (sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS)
		sarea_priv->nbox = RADEON_NR_SAREA_CLIPRECTS;

	if (DRM_COPY_FROM_USER(&depth_boxes, clear.depth_boxes,
			       sarea_priv->nbox * sizeof(depth_boxes[0])))
		return DRM_ERR(EFAULT);

	radeon_cp_dispatch_clear(dev, &clear, depth_boxes);

	COMMIT_RING();
	return 0;
}
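
/* radeon_cp_clear() above and the ioctls that follow share one shape:
 * LOCK_TEST_WITH_RETURN() verifies the caller holds the hardware lock,
 * the argument block is copied in from userspace,
 * RING_SPACE_TEST_WITH_RETURN() reserves ring space, and COMMIT_RING()
 * finally exposes the queued packets to the CP.
 */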
2144
Keith Whitwell2dcada32002-06-12 15:50:28 +00002145/* Not sure why this isn't set all the time:
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002146 */
2147static int radeon_do_init_pageflip(drm_device_t * dev)
Keith Whitwell2dcada32002-06-12 15:50:28 +00002148{
2149 drm_radeon_private_t *dev_priv = dev->dev_private;
Keith Whitwell24025ca2002-07-04 12:03:15 +00002150 RING_LOCALS;
2151
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002152 DRM_DEBUG("\n");
Keith Whitwell2dcada32002-06-12 15:50:28 +00002153
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002154 BEGIN_RING(6);
Keith Whitwell24025ca2002-07-04 12:03:15 +00002155 RADEON_WAIT_UNTIL_3D_IDLE();
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002156 OUT_RING(CP_PACKET0(RADEON_CRTC_OFFSET_CNTL, 0));
2157 OUT_RING(RADEON_READ(RADEON_CRTC_OFFSET_CNTL) |
2158 RADEON_CRTC_OFFSET_FLIP_CNTL);
2159 OUT_RING(CP_PACKET0(RADEON_CRTC2_OFFSET_CNTL, 0));
2160 OUT_RING(RADEON_READ(RADEON_CRTC2_OFFSET_CNTL) |
2161 RADEON_CRTC_OFFSET_FLIP_CNTL);
Keith Whitwell24025ca2002-07-04 12:03:15 +00002162 ADVANCE_RING();
2163
Keith Whitwell2dcada32002-06-12 15:50:28 +00002164 dev_priv->page_flipping = 1;
2165 dev_priv->current_page = 0;
Keith Whitwellbb91bc02002-06-27 17:56:39 +00002166 dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002167
2168 return 0;
2169}
2170
Jon Smirlfa6b1d12004-09-27 19:51:38 +00002171/* Called whenever a client dies, from drm_release.
Keith Whitwellf1c8fe92002-09-23 17:26:43 +00002172 * NOTE: Lock isn't necessarily held when this is called!
2173 */
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002174static int radeon_do_cleanup_pageflip(drm_device_t * dev)
Keith Whitwell2dcada32002-06-12 15:50:28 +00002175{
2176 drm_radeon_private_t *dev_priv = dev->dev_private;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002177 DRM_DEBUG("\n");
Keith Whitwell2dcada32002-06-12 15:50:28 +00002178
Keith Whitwell24025ca2002-07-04 12:03:15 +00002179 if (dev_priv->current_page != 0)
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002180 radeon_cp_dispatch_flip(dev);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002181
2182 dev_priv->page_flipping = 0;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002183 return 0;
2184}
2185
2186/* Swapping and flipping are different operations, need different ioctls.
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002187 * They can & should be intermixed to support multiple 3d windows.
Keith Whitwell2dcada32002-06-12 15:50:28 +00002188 */
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002189static int radeon_cp_flip(DRM_IOCTL_ARGS)
Keith Whitwell2dcada32002-06-12 15:50:28 +00002190{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002191 DRM_DEVICE;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002192 drm_radeon_private_t *dev_priv = dev->dev_private;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002193 DRM_DEBUG("\n");
Keith Whitwell2dcada32002-06-12 15:50:28 +00002194
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002195 LOCK_TEST_WITH_RETURN(dev, filp);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002196
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002197 RING_SPACE_TEST_WITH_RETURN(dev_priv);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002198
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002199 if (!dev_priv->page_flipping)
2200 radeon_do_init_pageflip(dev);
2201
2202 radeon_cp_dispatch_flip(dev);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002203
2204 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00002205 return 0;
2206}
2207
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002208static int radeon_cp_swap(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002209{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002210 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00002211 drm_radeon_private_t *dev_priv = dev->dev_private;
2212 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002213 DRM_DEBUG("\n");
Kevin E Martin0994e632001-01-05 22:57:55 +00002214
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002215 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002216
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002217 RING_SPACE_TEST_WITH_RETURN(dev_priv);
Gareth Hughes4d2a4452001-01-24 15:34:46 +00002218
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002219 if (sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002220 sarea_priv->nbox = RADEON_NR_SAREA_CLIPRECTS;
2221
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002222 radeon_cp_dispatch_swap(dev);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002223 dev_priv->sarea_priv->ctx_owner = 0;
Kevin E Martin0994e632001-01-05 22:57:55 +00002224
Keith Whitwell2dcada32002-06-12 15:50:28 +00002225 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00002226 return 0;
2227}
2228
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002229static int radeon_cp_vertex(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002230{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002231 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00002232 drm_radeon_private_t *dev_priv = dev->dev_private;
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002233 drm_file_t *filp_priv;
Dave Airlieef835972006-03-08 06:03:45 +00002234 drm_radeon_sarea_t *sarea_priv;
Kevin E Martin0994e632001-01-05 22:57:55 +00002235 drm_device_dma_t *dma = dev->dma;
2236 drm_buf_t *buf;
Kevin E Martin0994e632001-01-05 22:57:55 +00002237 drm_radeon_vertex_t vertex;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002238 drm_radeon_tcl_prim_t prim;
Kevin E Martin0994e632001-01-05 22:57:55 +00002239
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002240 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002241
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002242 if (!dev_priv) {
2243 DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002244 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002245 }
2246
Dave Airlieef835972006-03-08 06:03:45 +00002247 sarea_priv = dev_priv->sarea_priv;
2248
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002249 DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002250
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002251 DRM_COPY_FROM_USER_IOCTL(vertex, (drm_radeon_vertex_t __user *) data,
2252 sizeof(vertex));
Kevin E Martin0994e632001-01-05 22:57:55 +00002253
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002254 DRM_DEBUG("pid=%d index=%d count=%d discard=%d\n",
2255 DRM_CURRENTPID, vertex.idx, vertex.count, vertex.discard);
Kevin E Martin0994e632001-01-05 22:57:55 +00002256
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002257 if (vertex.idx < 0 || vertex.idx >= dma->buf_count) {
2258 DRM_ERROR("buffer index %d (of %d max)\n",
2259 vertex.idx, dma->buf_count - 1);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002260 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002261 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002262 if (vertex.prim < 0 || vertex.prim > RADEON_PRIM_TYPE_3VRT_LINE_LIST) {
2263 DRM_ERROR("buffer prim %d\n", vertex.prim);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002264 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002265 }
2266
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002267 RING_SPACE_TEST_WITH_RETURN(dev_priv);
2268 VB_AGE_TEST_WITH_RETURN(dev_priv);
Kevin E Martin0994e632001-01-05 22:57:55 +00002269
2270 buf = dma->buflist[vertex.idx];
Kevin E Martin0994e632001-01-05 22:57:55 +00002271
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002272 if (buf->filp != filp) {
2273 DRM_ERROR("process %d using buffer owned by %p\n",
2274 DRM_CURRENTPID, buf->filp);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002275 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002276 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002277 if (buf->pending) {
2278 DRM_ERROR("sending pending buffer %d\n", vertex.idx);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002279 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002280 }
2281
Keith Whitwell2dcada32002-06-12 15:50:28 +00002282 /* Build up a prim_t record:
2283 */
Keith Whitwellbaef0862002-03-08 16:03:37 +00002284 if (vertex.count) {
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002285 buf->used = vertex.count; /* not used? */
Keith Whitwell2dcada32002-06-12 15:50:28 +00002286
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002287 if (sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS) {
2288 if (radeon_emit_state(dev_priv, filp_priv,
2289 &sarea_priv->context_state,
2290 sarea_priv->tex_state,
2291 sarea_priv->dirty)) {
2292 DRM_ERROR("radeon_emit_state failed\n");
2293 return DRM_ERR(EINVAL);
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002294 }
2295
Keith Whitwellbaef0862002-03-08 16:03:37 +00002296 sarea_priv->dirty &= ~(RADEON_UPLOAD_TEX0IMAGES |
2297 RADEON_UPLOAD_TEX1IMAGES |
2298 RADEON_UPLOAD_TEX2IMAGES |
2299 RADEON_REQUIRE_QUIESCENCE);
2300 }
David Dawesab87c5d2002-02-14 02:00:26 +00002301
Keith Whitwellbaef0862002-03-08 16:03:37 +00002302 prim.start = 0;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002303 prim.finish = vertex.count; /* unused */
Keith Whitwellbaef0862002-03-08 16:03:37 +00002304 prim.prim = vertex.prim;
Keith Whitwellbaef0862002-03-08 16:03:37 +00002305 prim.numverts = vertex.count;
2306 prim.vc_format = dev_priv->sarea_priv->vc_format;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002307
2308 radeon_cp_dispatch_vertex(dev, buf, &prim);
David Dawesab87c5d2002-02-14 02:00:26 +00002309 }
2310
David Dawesab87c5d2002-02-14 02:00:26 +00002311 if (vertex.discard) {
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002312 radeon_cp_discard_buffer(dev, buf);
David Dawesab87c5d2002-02-14 02:00:26 +00002313 }
Kevin E Martin0994e632001-01-05 22:57:55 +00002314
Keith Whitwell2dcada32002-06-12 15:50:28 +00002315 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00002316 return 0;
2317}

static int radeon_cp_indices(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_file_t *filp_priv;
	drm_radeon_sarea_t *sarea_priv;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_radeon_indices_t elts;
	drm_radeon_tcl_prim_t prim;
	int count;

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}
	sarea_priv = dev_priv->sarea_priv;

	DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);

	DRM_COPY_FROM_USER_IOCTL(elts, (drm_radeon_indices_t __user *) data,
				 sizeof(elts));

	DRM_DEBUG("pid=%d index=%d start=%d end=%d discard=%d\n",
		  DRM_CURRENTPID, elts.idx, elts.start, elts.end, elts.discard);

	if (elts.idx < 0 || elts.idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  elts.idx, dma->buf_count - 1);
		return DRM_ERR(EINVAL);
	}
	if (elts.prim < 0 || elts.prim > RADEON_PRIM_TYPE_3VRT_LINE_LIST) {
		DRM_ERROR("buffer prim %d\n", elts.prim);
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[elts.idx];

	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", elts.idx);
		return DRM_ERR(EINVAL);
	}

	count = (elts.end - elts.start) / sizeof(u16);
	elts.start -= RADEON_INDEX_PRIM_OFFSET;

	if (elts.start & 0x7) {
		DRM_ERROR("misaligned buffer 0x%x\n", elts.start);
		return DRM_ERR(EINVAL);
	}
	if (elts.start < buf->used) {
		DRM_ERROR("no header 0x%x - 0x%x\n", elts.start, buf->used);
		return DRM_ERR(EINVAL);
	}

	buf->used = elts.end;

	if (sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS) {
		if (radeon_emit_state(dev_priv, filp_priv,
				      &sarea_priv->context_state,
				      sarea_priv->tex_state,
				      sarea_priv->dirty)) {
			DRM_ERROR("radeon_emit_state failed\n");
			return DRM_ERR(EINVAL);
		}

		sarea_priv->dirty &= ~(RADEON_UPLOAD_TEX0IMAGES |
				       RADEON_UPLOAD_TEX1IMAGES |
				       RADEON_UPLOAD_TEX2IMAGES |
				       RADEON_REQUIRE_QUIESCENCE);
	}

	/* Build up a prim_t record:
	 */
	prim.start = elts.start;
	prim.finish = elts.end;
	prim.prim = elts.prim;
	prim.offset = 0;	/* offset from start of dma buffers */
	prim.numverts = RADEON_MAX_VB_VERTS;	/* duh */
	prim.vc_format = dev_priv->sarea_priv->vc_format;

	radeon_cp_dispatch_indices(dev, buf, &prim);
	if (elts.discard) {
		radeon_cp_discard_buffer(dev, buf);
	}

	COMMIT_RING();
	return 0;
}
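
/*
 * Illustrative sketch, not part of the driver: what the checks above
 * expect from a client.  elts.start/elts.end are byte offsets into the
 * DMA buffer; the range between them holds 16-bit vertex indices,
 * preceded by RADEON_INDEX_PRIM_OFFSET bytes of packet header, and the
 * header-adjusted start must be 8-byte aligned.  Values here are
 * hypothetical.
 *
 *	drm_radeon_indices_t elts;
 *
 *	elts.idx = idx;				// DMA buffer index
 *	elts.prim = RADEON_PRIM_TYPE_TRI_LIST;
 *	elts.start = RADEON_INDEX_PRIM_OFFSET;	// indices follow the header
 *	elts.end = elts.start + nr_indices * 2;	// one u16 per index
 *	elts.discard = 1;
 *	drmCommandWrite(fd, DRM_RADEON_INDICES, &elts, sizeof(elts));
 */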

static int radeon_cp_texture(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_texture_t tex;
	drm_radeon_tex_image_t image;
	int ret;

	LOCK_TEST_WITH_RETURN(dev, filp);

	DRM_COPY_FROM_USER_IOCTL(tex, (drm_radeon_texture_t __user *) data,
				 sizeof(tex));

	if (tex.image == NULL) {
		DRM_ERROR("null texture image!\n");
		return DRM_ERR(EINVAL);
	}

	if (DRM_COPY_FROM_USER(&image,
			       (drm_radeon_tex_image_t __user *) tex.image,
			       sizeof(image)))
		return DRM_ERR(EFAULT);

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	ret = radeon_cp_dispatch_texture(filp, dev, &tex, &image);

	COMMIT_RING();
	return ret;
}

static int radeon_cp_stipple(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_stipple_t stipple;
	u32 mask[32];

	LOCK_TEST_WITH_RETURN(dev, filp);

	DRM_COPY_FROM_USER_IOCTL(stipple, (drm_radeon_stipple_t __user *) data,
				 sizeof(stipple));

	if (DRM_COPY_FROM_USER(&mask, stipple.mask, 32 * sizeof(u32)))
		return DRM_ERR(EFAULT);

	RING_SPACE_TEST_WITH_RETURN(dev_priv);

	radeon_cp_dispatch_stipple(dev, mask);

	COMMIT_RING();
	return 0;
}
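
/*
 * Illustrative sketch, not part of the driver: the stipple ioctl takes a
 * pointer to a 32x32 polygon stipple pattern, one 32-bit row per dword.
 * A hypothetical client uploading a checkerboard:
 *
 *	unsigned int mask[32];
 *	drm_radeon_stipple_t stipple;
 *	int i;
 *
 *	for (i = 0; i < 32; i++)
 *		mask[i] = (i & 1) ? 0xaaaaaaaa : 0x55555555;
 *	stipple.mask = mask;
 *	drmCommandWrite(fd, DRM_RADEON_STIPPLE, &stipple, sizeof(stipple));
 */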

static int radeon_cp_indirect(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_radeon_indirect_t indirect;
	RING_LOCALS;

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(indirect,
				 (drm_radeon_indirect_t __user *) data,
				 sizeof(indirect));

	DRM_DEBUG("indirect: idx=%d s=%d e=%d d=%d\n",
		  indirect.idx, indirect.start, indirect.end, indirect.discard);

	if (indirect.idx < 0 || indirect.idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  indirect.idx, dma->buf_count - 1);
		return DRM_ERR(EINVAL);
	}

	buf = dma->buflist[indirect.idx];

	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}
	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", indirect.idx);
		return DRM_ERR(EINVAL);
	}

	if (indirect.start < buf->used) {
		DRM_ERROR("reusing indirect: start=0x%x actual=0x%x\n",
			  indirect.start, buf->used);
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf->used = indirect.end;

	/* Wait for the 3D stream to idle before the indirect buffer
	 * containing 2D acceleration commands is processed.
	 */
	BEGIN_RING(2);

	RADEON_WAIT_UNTIL_3D_IDLE();

	ADVANCE_RING();

	/* Dispatch the indirect buffer full of commands from the
	 * X server.  This is insecure and is thus only available to
	 * privileged clients.
	 */
	radeon_cp_dispatch_indirect(dev, buf, indirect.start, indirect.end);
	if (indirect.discard) {
		radeon_cp_discard_buffer(dev, buf);
	}

	COMMIT_RING();
	return 0;
}

static int radeon_cp_vertex2(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_file_t *filp_priv;
	drm_radeon_sarea_t *sarea_priv;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_radeon_vertex2_t vertex;
	int i;
	unsigned char laststate;

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	sarea_priv = dev_priv->sarea_priv;

	DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);

	DRM_COPY_FROM_USER_IOCTL(vertex, (drm_radeon_vertex2_t __user *) data,
				 sizeof(vertex));

	DRM_DEBUG("pid=%d index=%d discard=%d\n",
		  DRM_CURRENTPID, vertex.idx, vertex.discard);

	if (vertex.idx < 0 || vertex.idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  vertex.idx, dma->buf_count - 1);
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[vertex.idx];

	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}

	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", vertex.idx);
		return DRM_ERR(EINVAL);
	}

	if (sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS)
		return DRM_ERR(EINVAL);

	for (laststate = 0xff, i = 0; i < vertex.nr_prims; i++) {
		drm_radeon_prim_t prim;
		drm_radeon_tcl_prim_t tclprim;

		if (DRM_COPY_FROM_USER(&prim, &vertex.prim[i], sizeof(prim)))
			return DRM_ERR(EFAULT);

		if (prim.stateidx != laststate) {
			drm_radeon_state_t state;

			if (DRM_COPY_FROM_USER(&state,
					       &vertex.state[prim.stateidx],
					       sizeof(state)))
				return DRM_ERR(EFAULT);

			if (radeon_emit_state2(dev_priv, filp_priv, &state)) {
				DRM_ERROR("radeon_emit_state2 failed\n");
				return DRM_ERR(EINVAL);
			}

			laststate = prim.stateidx;
		}

		tclprim.start = prim.start;
		tclprim.finish = prim.finish;
		tclprim.prim = prim.prim;
		tclprim.vc_format = prim.vc_format;

		if (prim.prim & RADEON_PRIM_WALK_IND) {
			tclprim.offset = prim.numverts * 64;
			tclprim.numverts = RADEON_MAX_VB_VERTS;	/* duh */

			radeon_cp_dispatch_indices(dev, buf, &tclprim);
		} else {
			tclprim.numverts = prim.numverts;
			tclprim.offset = 0;	/* not used */

			radeon_cp_dispatch_vertex(dev, buf, &tclprim);
		}

		if (sarea_priv->nbox == 1)
			sarea_priv->nbox = 0;
	}

	if (vertex.discard) {
		radeon_cp_discard_buffer(dev, buf);
	}

	COMMIT_RING();
	return 0;
}
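
/*
 * Illustrative sketch, not part of the driver: the vertex2 path lets a
 * client batch several primitives in one ioctl, each referencing a
 * state block by index; state is only re-emitted when the index
 * changes, as the loop above shows.  Hypothetical client setup:
 *
 *	drm_radeon_state_t states[2];	// filled-in state blocks
 *	drm_radeon_prim_t prims[2];	// start/finish/prim/vc_format set
 *	drm_radeon_vertex2_t v;
 *
 *	prims[0].stateidx = 0;		// first prim uses states[0]
 *	prims[1].stateidx = 1;		// switch triggers radeon_emit_state2
 *	v.idx = idx;
 *	v.discard = 1;
 *	v.nr_states = 2;
 *	v.state = states;
 *	v.nr_prims = 2;
 *	v.prim = prims;
 *	drmCommandWrite(fd, DRM_RADEON_VERTEX2, &v, sizeof(v));
 */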

static int radeon_emit_packets(drm_radeon_private_t * dev_priv,
			       drm_file_t * filp_priv,
			       drm_radeon_cmd_header_t header,
			       drm_radeon_kcmd_buffer_t *cmdbuf)
{
	int id = (int)header.packet.packet_id;
	int sz, reg;
	int *data = (int *)cmdbuf->buf;
	RING_LOCALS;

	if (id >= RADEON_MAX_STATE_PACKETS)
		return DRM_ERR(EINVAL);

	sz = packet[id].len;
	reg = packet[id].start;

	if (sz * sizeof(int) > cmdbuf->bufsz) {
		DRM_ERROR("Packet size provided larger than data provided\n");
		return DRM_ERR(EINVAL);
	}

	if (radeon_check_and_fixup_packets(dev_priv, filp_priv, id, data)) {
		DRM_ERROR("Packet verification failed\n");
		return DRM_ERR(EINVAL);
	}

	BEGIN_RING(sz + 1);
	OUT_RING(CP_PACKET0(reg, (sz - 1)));
	OUT_RING_TABLE(data, sz);
	ADVANCE_RING();

	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}
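
/*
 * For reference, a sketch of the type-0 CP packet header emitted above.
 * Following the CP_PACKET0() macro in radeon_drv.h, the dword count and
 * register offset share a single header dword, which is why a write of
 * sz registers needs sz + 1 ring entries:
 *
 *	// type 0 in bits 31:30, (ndwords - 1) in bits 29:16,
 *	// register byte offset divided by 4 in the low bits
 *	u32 header0(u32 reg, u32 ndwords)
 *	{
 *		return ((ndwords - 1) << 16) | (reg >> 2);
 *	}
 */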

static __inline__ int radeon_emit_scalars(drm_radeon_private_t *dev_priv,
					  drm_radeon_cmd_header_t header,
					  drm_radeon_kcmd_buffer_t *cmdbuf)
{
	int sz = header.scalars.count;
	int start = header.scalars.offset;
	int stride = header.scalars.stride;
	RING_LOCALS;

	BEGIN_RING(3 + sz);
	OUT_RING(CP_PACKET0(RADEON_SE_TCL_SCALAR_INDX_REG, 0));
	OUT_RING(start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT));
	OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_SCALAR_DATA_REG, sz - 1));
	OUT_RING_TABLE(cmdbuf->buf, sz);
	ADVANCE_RING();
	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}

/* God this is ugly
 *
 * The scalars command header stores its offset in an 8-bit field, so it
 * cannot address scalar state past 255; this second variant reaches the
 * upper bank by biasing the start index with 0x100.
 */
static __inline__ int radeon_emit_scalars2(drm_radeon_private_t *dev_priv,
					   drm_radeon_cmd_header_t header,
					   drm_radeon_kcmd_buffer_t *cmdbuf)
{
	int sz = header.scalars.count;
	int start = ((unsigned int)header.scalars.offset) + 0x100;
	int stride = header.scalars.stride;
	RING_LOCALS;

	BEGIN_RING(3 + sz);
	OUT_RING(CP_PACKET0(RADEON_SE_TCL_SCALAR_INDX_REG, 0));
	OUT_RING(start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT));
	OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_SCALAR_DATA_REG, sz - 1));
	OUT_RING_TABLE(cmdbuf->buf, sz);
	ADVANCE_RING();
	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}

static __inline__ int radeon_emit_vectors(drm_radeon_private_t *dev_priv,
					  drm_radeon_cmd_header_t header,
					  drm_radeon_kcmd_buffer_t *cmdbuf)
{
	int sz = header.vectors.count;
	int start = header.vectors.offset;
	int stride = header.vectors.stride;
	RING_LOCALS;

	BEGIN_RING(5 + sz);
	OUT_RING_REG(RADEON_SE_TCL_STATE_FLUSH, 0);
	OUT_RING(CP_PACKET0(RADEON_SE_TCL_VECTOR_INDX_REG, 0));
	OUT_RING(start | (stride << RADEON_VEC_INDX_OCTWORD_STRIDE_SHIFT));
	OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_VECTOR_DATA_REG, (sz - 1)));
	OUT_RING_TABLE(cmdbuf->buf, sz);
	ADVANCE_RING();

	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}

static __inline__ int radeon_emit_veclinear(drm_radeon_private_t *dev_priv,
					    drm_radeon_cmd_header_t header,
					    drm_radeon_kcmd_buffer_t *cmdbuf)
{
	int sz = header.veclinear.count * 4;
	int start = header.veclinear.addr_lo | (header.veclinear.addr_hi << 8);
	RING_LOCALS;

	if (!sz)
		return 0;
	if (sz * 4 > cmdbuf->bufsz)
		return DRM_ERR(EINVAL);

	BEGIN_RING(5 + sz);
	OUT_RING_REG(RADEON_SE_TCL_STATE_FLUSH, 0);
	OUT_RING(CP_PACKET0(RADEON_SE_TCL_VECTOR_INDX_REG, 0));
	OUT_RING(start | (1 << RADEON_VEC_INDX_OCTWORD_STRIDE_SHIFT));
	OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_VECTOR_DATA_REG, (sz - 1)));
	OUT_RING_TABLE(cmdbuf->buf, sz);
	ADVANCE_RING();

	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}
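
/*
 * Illustrative sketch, not part of the driver: the veclinear header
 * splits a 16-bit vector start address across two 8-bit fields and
 * counts in 4-dword vectors, so a client encodes it roughly as follows
 * (values hypothetical):
 *
 *	drm_radeon_cmd_header_t h;
 *
 *	h.i = 0;
 *	h.veclinear.cmd_type = RADEON_CMD_VECLINEAR;
 *	h.veclinear.addr_lo = addr & 0xff;
 *	h.veclinear.addr_hi = (addr >> 8) & 0xff;
 *	h.veclinear.count = nvecs;	// 4 payload dwords follow per vector
 */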

static int radeon_emit_packet3(drm_device_t * dev,
			       drm_file_t * filp_priv,
			       drm_radeon_kcmd_buffer_t *cmdbuf)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	unsigned int cmdsz;
	int ret;
	RING_LOCALS;

	DRM_DEBUG("\n");

	if ((ret = radeon_check_and_fixup_packet3(dev_priv, filp_priv,
						  cmdbuf, &cmdsz))) {
		DRM_ERROR("Packet verification failed\n");
		return ret;
	}

	BEGIN_RING(cmdsz);
	OUT_RING_TABLE(cmdbuf->buf, cmdsz);
	ADVANCE_RING();

	cmdbuf->buf += cmdsz * 4;
	cmdbuf->bufsz -= cmdsz * 4;
	return 0;
}

static int radeon_emit_packet3_cliprect(drm_device_t *dev,
					drm_file_t *filp_priv,
					drm_radeon_kcmd_buffer_t *cmdbuf,
					int orig_nbox)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_clip_rect_t box;
	unsigned int cmdsz;
	int ret;
	drm_clip_rect_t __user *boxes = cmdbuf->boxes;
	int i = 0;
	RING_LOCALS;

	DRM_DEBUG("\n");

	if ((ret = radeon_check_and_fixup_packet3(dev_priv, filp_priv,
						  cmdbuf, &cmdsz))) {
		DRM_ERROR("Packet verification failed\n");
		return ret;
	}

	if (!orig_nbox)
		goto out;

	do {
		if (i < cmdbuf->nbox) {
			if (DRM_COPY_FROM_USER(&box, &boxes[i], sizeof(box)))
				return DRM_ERR(EFAULT);
			/* FIXME The second and subsequent times round
			 * this loop, send a WAIT_UNTIL_3D_IDLE before
			 * calling emit_clip_rect(). This fixes a
			 * lockup on fast machines when sending
			 * several cliprects with a cmdbuf, as when
			 * waving a 2D window over a 3D
			 * window. Something in the commands from user
			 * space seems to hang the card when they're
			 * sent several times in a row. That would be
			 * the correct place to fix it but this works
			 * around it until I can figure that out - Tim
			 * Smith */
			if (i) {
				BEGIN_RING(2);
				RADEON_WAIT_UNTIL_3D_IDLE();
				ADVANCE_RING();
			}
			radeon_emit_clip_rect(dev_priv, &box);
		}

		BEGIN_RING(cmdsz);
		OUT_RING_TABLE(cmdbuf->buf, cmdsz);
		ADVANCE_RING();

	} while (++i < cmdbuf->nbox);
	if (cmdbuf->nbox == 1)
		cmdbuf->nbox = 0;

      out:
	cmdbuf->buf += cmdsz * 4;
	cmdbuf->bufsz -= cmdsz * 4;
	return 0;
}

static int radeon_emit_wait(drm_device_t * dev, int flags)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;

	DRM_DEBUG("%s: %x\n", __FUNCTION__, flags);
	switch (flags) {
	case RADEON_WAIT_2D:
		BEGIN_RING(2);
		RADEON_WAIT_UNTIL_2D_IDLE();
		ADVANCE_RING();
		break;
	case RADEON_WAIT_3D:
		BEGIN_RING(2);
		RADEON_WAIT_UNTIL_3D_IDLE();
		ADVANCE_RING();
		break;
	case RADEON_WAIT_2D | RADEON_WAIT_3D:
		BEGIN_RING(2);
		RADEON_WAIT_UNTIL_IDLE();
		ADVANCE_RING();
		break;
	default:
		return DRM_ERR(EINVAL);
	}

	return 0;
}

static int radeon_cp_cmdbuf(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_file_t *filp_priv;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf = NULL;
	int idx;
	drm_radeon_kcmd_buffer_t cmdbuf;
	drm_radeon_cmd_header_t header;
	int orig_nbox, orig_bufsz;
	char *kbuf = NULL;

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);

	DRM_COPY_FROM_USER_IOCTL(cmdbuf,
				 (drm_radeon_kcmd_buffer_t __user *) data,
				 sizeof(cmdbuf));

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	if (cmdbuf.bufsz > 64 * 1024 || cmdbuf.bufsz < 0) {
		return DRM_ERR(EINVAL);
	}

	/* Allocate an in-kernel area and copy in the cmdbuf.  Do this to avoid
	 * races between checking values and using those values in other code,
	 * and simply to avoid a lot of function calls to copy in data.
	 */
	orig_bufsz = cmdbuf.bufsz;
	if (orig_bufsz != 0) {
		kbuf = drm_alloc(cmdbuf.bufsz, DRM_MEM_DRIVER);
		if (kbuf == NULL)
			return DRM_ERR(ENOMEM);
		if (DRM_COPY_FROM_USER(kbuf, (void __user *)cmdbuf.buf,
				       cmdbuf.bufsz)) {
			drm_free(kbuf, orig_bufsz, DRM_MEM_DRIVER);
			return DRM_ERR(EFAULT);
		}
		cmdbuf.buf = kbuf;
	}

	orig_nbox = cmdbuf.nbox;

	if (dev_priv->microcode_version == UCODE_R300) {
		int temp;
		temp = r300_do_cp_cmdbuf(dev, filp, filp_priv, &cmdbuf);

		if (orig_bufsz != 0)
			drm_free(kbuf, orig_bufsz, DRM_MEM_DRIVER);

		return temp;
	}

	/* microcode_version != r300 */
	while (cmdbuf.bufsz >= sizeof(header)) {

		header.i = *(int *)cmdbuf.buf;
		cmdbuf.buf += sizeof(header);
		cmdbuf.bufsz -= sizeof(header);

		switch (header.header.cmd_type) {
		case RADEON_CMD_PACKET:
			DRM_DEBUG("RADEON_CMD_PACKET\n");
			if (radeon_emit_packets
			    (dev_priv, filp_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_packets failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_SCALARS:
			DRM_DEBUG("RADEON_CMD_SCALARS\n");
			if (radeon_emit_scalars(dev_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_scalars failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_VECTORS:
			DRM_DEBUG("RADEON_CMD_VECTORS\n");
			if (radeon_emit_vectors(dev_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_vectors failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_DMA_DISCARD:
			DRM_DEBUG("RADEON_CMD_DMA_DISCARD\n");
			idx = header.dma.buf_idx;
			if (idx < 0 || idx >= dma->buf_count) {
				DRM_ERROR("buffer index %d (of %d max)\n",
					  idx, dma->buf_count - 1);
				goto err;
			}

			buf = dma->buflist[idx];
			if (buf->filp != filp || buf->pending) {
				DRM_ERROR("bad buffer %p %p %d\n",
					  buf->filp, filp, buf->pending);
				goto err;
			}

			radeon_cp_discard_buffer(dev, buf);
			break;

		case RADEON_CMD_PACKET3:
			DRM_DEBUG("RADEON_CMD_PACKET3\n");
			if (radeon_emit_packet3(dev, filp_priv, &cmdbuf)) {
				DRM_ERROR("radeon_emit_packet3 failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_PACKET3_CLIP:
			DRM_DEBUG("RADEON_CMD_PACKET3_CLIP\n");
			if (radeon_emit_packet3_cliprect
			    (dev, filp_priv, &cmdbuf, orig_nbox)) {
				DRM_ERROR("radeon_emit_packet3_clip failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_SCALARS2:
			DRM_DEBUG("RADEON_CMD_SCALARS2\n");
			if (radeon_emit_scalars2(dev_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_scalars2 failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_WAIT:
			DRM_DEBUG("RADEON_CMD_WAIT\n");
			if (radeon_emit_wait(dev, header.wait.flags)) {
				DRM_ERROR("radeon_emit_wait failed\n");
				goto err;
			}
			break;
		case RADEON_CMD_VECLINEAR:
			DRM_DEBUG("RADEON_CMD_VECLINEAR\n");
			if (radeon_emit_veclinear(dev_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_veclinear failed\n");
				goto err;
			}
			break;

		default:
			DRM_ERROR("bad cmd_type %d at %p\n",
				  header.header.cmd_type,
				  cmdbuf.buf - sizeof(header));
			goto err;
		}
	}

	if (orig_bufsz != 0)
		drm_free(kbuf, orig_bufsz, DRM_MEM_DRIVER);

	DRM_DEBUG("DONE\n");
	COMMIT_RING();
	return 0;

      err:
	if (orig_bufsz != 0)
		drm_free(kbuf, orig_bufsz, DRM_MEM_DRIVER);
	return DRM_ERR(EINVAL);
}
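
/*
 * Illustrative sketch, not part of the driver: the stream parsed above
 * is a packed sequence of 4-byte command headers, each followed
 * immediately by its payload dwords.  A hypothetical client emitting
 * one state packet and then a wait (pp_misc_regs/pp_misc_len are
 * assumptions):
 *
 *	u32 *p = stream;
 *	drm_radeon_cmd_header_t h;
 *
 *	h.i = 0;
 *	h.packet.cmd_type = RADEON_CMD_PACKET;
 *	h.packet.packet_id = RADEON_EMIT_PP_MISC;
 *	*p++ = h.i;				// header dword
 *	memcpy(p, pp_misc_regs, pp_misc_len * 4);	// payload dwords
 *	p += pp_misc_len;
 *
 *	h.i = 0;
 *	h.wait.cmd_type = RADEON_CMD_WAIT;
 *	h.wait.flags = RADEON_WAIT_3D;
 *	*p++ = h.i;				// waits carry no payload
 */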

static int radeon_cp_getparam(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_getparam_t param;
	int value;

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(param, (drm_radeon_getparam_t __user *) data,
				 sizeof(param));

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	switch (param.param) {
	case RADEON_PARAM_GART_BUFFER_OFFSET:
		value = dev_priv->gart_buffers_offset;
		break;
	case RADEON_PARAM_LAST_FRAME:
		dev_priv->stats.last_frame_reads++;
		value = GET_SCRATCH(0);
		break;
	case RADEON_PARAM_LAST_DISPATCH:
		value = GET_SCRATCH(1);
		break;
	case RADEON_PARAM_LAST_CLEAR:
		dev_priv->stats.last_clear_reads++;
		value = GET_SCRATCH(2);
		break;
	case RADEON_PARAM_IRQ_NR:
		value = dev->irq;
		break;
	case RADEON_PARAM_GART_BASE:
		value = dev_priv->gart_vm_start;
		break;
	case RADEON_PARAM_REGISTER_HANDLE:
		value = dev_priv->mmio->offset;
		break;
	case RADEON_PARAM_STATUS_HANDLE:
		value = dev_priv->ring_rptr_offset;
		break;
#ifndef __LP64__
		/*
		 * This ioctl() doesn't work on 64-bit platforms because
		 * hw_lock is a pointer which can't fit into an int-sized
		 * variable.  According to Michel Dänzer, the ioctl() is
		 * only used on embedded platforms, so not supporting it
		 * shouldn't be a problem.  If the same functionality is
		 * needed on 64-bit platforms, a new ioctl() would have to
		 * be added, so backwards-compatibility for the embedded
		 * platforms can be maintained.  --davidm 4-Feb-2004.
		 */
	case RADEON_PARAM_SAREA_HANDLE:
		/* The lock is the first dword in the sarea. */
		value = (long)dev->lock.hw_lock;
		break;
#endif
	case RADEON_PARAM_GART_TEX_HANDLE:
		value = dev_priv->gart_textures_offset;
		break;
	case RADEON_PARAM_SCRATCH_OFFSET:
		if (!dev_priv->writeback_works)
			return DRM_ERR(EINVAL);
		value = RADEON_SCRATCH_REG_OFFSET;
		break;

	case RADEON_PARAM_CARD_TYPE:
		if (dev_priv->flags & RADEON_IS_PCIE)
			value = RADEON_CARD_PCIE;
		else if (dev_priv->flags & RADEON_IS_AGP)
			value = RADEON_CARD_AGP;
		else
			value = RADEON_CARD_PCI;
		break;
	default:
		DRM_DEBUG("Invalid parameter %d\n", param.param);
		return DRM_ERR(EINVAL);
	}

	if (DRM_COPY_TO_USER(param.value, &value, sizeof(int))) {
		DRM_ERROR("copy_to_user\n");
		return DRM_ERR(EFAULT);
	}

	return 0;
}
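
/*
 * Illustrative sketch, not part of the driver: querying one of the
 * parameters above from userland through libdrm, assuming an open `fd`:
 *
 *	drm_radeon_getparam_t gp;
 *	int irq_nr;
 *
 *	gp.param = RADEON_PARAM_IRQ_NR;
 *	gp.value = &irq_nr;		// kernel copies the int out here
 *	if (drmCommandWriteRead(fd, DRM_RADEON_GETPARAM,
 *				&gp, sizeof(gp)) == 0)
 *		printf("radeon irq: %d\n", irq_nr);
 */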

static int radeon_cp_setparam(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_file_t *filp_priv;
	drm_radeon_setparam_t sp;
	struct drm_radeon_driver_file_fields *radeon_priv;

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);

	DRM_COPY_FROM_USER_IOCTL(sp, (drm_radeon_setparam_t __user *) data,
				 sizeof(sp));

	switch (sp.param) {
	case RADEON_SETPARAM_FB_LOCATION:
		radeon_priv = filp_priv->driver_priv;
		radeon_priv->radeon_fb_delta = dev_priv->fb_location - sp.value;
		break;
	case RADEON_SETPARAM_SWITCH_TILING:
		if (sp.value == 0) {
			DRM_DEBUG("color tiling disabled\n");
			dev_priv->front_pitch_offset &= ~RADEON_DST_TILE_MACRO;
			dev_priv->back_pitch_offset &= ~RADEON_DST_TILE_MACRO;
			dev_priv->sarea_priv->tiling_enabled = 0;
		} else if (sp.value == 1) {
			DRM_DEBUG("color tiling enabled\n");
			dev_priv->front_pitch_offset |= RADEON_DST_TILE_MACRO;
			dev_priv->back_pitch_offset |= RADEON_DST_TILE_MACRO;
			dev_priv->sarea_priv->tiling_enabled = 1;
		}
		break;
	case RADEON_SETPARAM_PCIGART_LOCATION:
		dev_priv->pcigart_offset = sp.value;
		break;
	case RADEON_SETPARAM_NEW_MEMMAP:
		dev_priv->new_memmap = sp.value;
		break;
	default:
		DRM_DEBUG("Invalid parameter %d\n", sp.param);
		return DRM_ERR(EINVAL);
	}

	return 0;
}
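
/*
 * Illustrative sketch, not part of the driver: toggling color tiling
 * from a hypothetical userland client via the setparam ioctl:
 *
 *	drm_radeon_setparam_t sp;
 *
 *	sp.param = RADEON_SETPARAM_SWITCH_TILING;
 *	sp.value = 1;		// 1 enables macro tiling, 0 disables it
 *	drmCommandWrite(fd, DRM_RADEON_SETPARAM, &sp, sizeof(sp));
 */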

/* When a client dies:
 * - Check for and clean up flipped page state
 * - Free any alloced GART memory.
 * - Free any alloced radeon surfaces.
 *
 * DRM infrastructure takes care of reclaiming dma buffers.
 */
void radeon_driver_preclose(drm_device_t * dev, DRMFILE filp)
{
	if (dev->dev_private) {
		drm_radeon_private_t *dev_priv = dev->dev_private;
		if (dev_priv->page_flipping) {
			radeon_do_cleanup_pageflip(dev);
		}
		radeon_mem_release(filp, dev_priv->gart_heap);
		radeon_mem_release(filp, dev_priv->fb_heap);
		radeon_surfaces_release(filp, dev_priv);
	}
}

void radeon_driver_lastclose(drm_device_t * dev)
{
	radeon_do_release(dev);
}

int radeon_driver_open(drm_device_t * dev, drm_file_t * filp_priv)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	struct drm_radeon_driver_file_fields *radeon_priv;

	DRM_DEBUG("\n");
	radeon_priv =
	    (struct drm_radeon_driver_file_fields *)
	    drm_alloc(sizeof(*radeon_priv), DRM_MEM_FILES);

	if (!radeon_priv)
		return -ENOMEM;

	filp_priv->driver_priv = radeon_priv;

	if (dev_priv)
		radeon_priv->radeon_fb_delta = dev_priv->fb_location;
	else
		radeon_priv->radeon_fb_delta = 0;
	return 0;
}

void radeon_driver_postclose(drm_device_t * dev, drm_file_t * filp_priv)
{
	struct drm_radeon_driver_file_fields *radeon_priv =
	    filp_priv->driver_priv;

	drm_free(radeon_priv, sizeof(*radeon_priv), DRM_MEM_FILES);
}

drm_ioctl_desc_t radeon_ioctls[] = {
	[DRM_IOCTL_NR(DRM_RADEON_CP_INIT)] = {radeon_cp_init, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_RADEON_CP_START)] = {radeon_cp_start, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_RADEON_CP_STOP)] = {radeon_cp_stop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_RADEON_CP_RESET)] = {radeon_cp_reset, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_RADEON_CP_IDLE)] = {radeon_cp_idle, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_CP_RESUME)] = {radeon_cp_resume, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_RESET)] = {radeon_engine_reset, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_FULLSCREEN)] = {radeon_fullscreen, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_SWAP)] = {radeon_cp_swap, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_CLEAR)] = {radeon_cp_clear, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_VERTEX)] = {radeon_cp_vertex, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_INDICES)] = {radeon_cp_indices, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_TEXTURE)] = {radeon_cp_texture, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_STIPPLE)] = {radeon_cp_stipple, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_INDIRECT)] = {radeon_cp_indirect, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_RADEON_VERTEX2)] = {radeon_cp_vertex2, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_CMDBUF)] = {radeon_cp_cmdbuf, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_GETPARAM)] = {radeon_cp_getparam, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_FLIP)] = {radeon_cp_flip, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_ALLOC)] = {radeon_mem_alloc, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_FREE)] = {radeon_mem_free, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_INIT_HEAP)] = {radeon_mem_init_heap, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY},
	[DRM_IOCTL_NR(DRM_RADEON_IRQ_EMIT)] = {radeon_irq_emit, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_IRQ_WAIT)] = {radeon_irq_wait, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_SETPARAM)] = {radeon_cp_setparam, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_SURF_ALLOC)] = {radeon_surface_alloc, DRM_AUTH},
	[DRM_IOCTL_NR(DRM_RADEON_SURF_FREE)] = {radeon_surface_free, DRM_AUTH}
};

int radeon_max_ioctl = DRM_ARRAY_SIZE(radeon_ioctls);
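
/*
 * For reference: userland reaches the table above through the generic
 * DRM ioctl range; each DRM_RADEON_* index is offset by DRM_COMMAND_BASE
 * when the ioctl number is formed, e.g. (from the shared radeon_drm.h):
 *
 *	#define DRM_IOCTL_RADEON_VERTEX \
 *		DRM_IOW(DRM_COMMAND_BASE + DRM_RADEON_VERTEX, \
 *			drm_radeon_vertex_t)
 */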