/* radeon_state.c -- State support for Radeon -*- linux-c -*-
 *
 * Copyright 2000 VA Linux Systems, Inc., Fremont, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Gareth Hughes <gareth@valinux.com>
 *    Kevin E. Martin <martin@valinux.com>
 */

#include "drmP.h"
#include "drm.h"
#include "drm_sarea.h"
#include "radeon_drm.h"
#include "radeon_drv.h"

/* ================================================================
 * Helper functions for client state checking and fixup
 */

static __inline__ int radeon_check_and_fixup_offset(drm_radeon_private_t *
						    dev_priv,
						    drm_file_t * filp_priv,
						    u32 * offset)
{
	u32 off = *offset;
	struct drm_radeon_driver_file_fields *radeon_priv;

	if (off >= dev_priv->fb_location &&
	    off < (dev_priv->gart_vm_start + dev_priv->gart_size))
		return 0;

	radeon_priv = filp_priv->driver_priv;

	off += radeon_priv->radeon_fb_delta;

	DRM_DEBUG("offset fixed up to 0x%x\n", off);

	if (off < dev_priv->fb_location ||
	    off >= (dev_priv->gart_vm_start + dev_priv->gart_size))
		return DRM_ERR(EINVAL);

	*offset = off;

	return 0;
}
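
/* Note on the fixup above: an offset the client supplies is accepted
 * unchanged when it already falls inside the window spanning the
 * framebuffer and GART apertures.  Otherwise it is rebased by
 * radeon_fb_delta -- apparently the difference between where this
 * client believes the framebuffer lives and where it currently does --
 * and re-checked, so a stale offset is either repaired or rejected
 * with EINVAL, never passed through to the hardware.
 */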

static __inline__ int radeon_check_and_fixup_packets(drm_radeon_private_t *
						     dev_priv,
						     drm_file_t * filp_priv,
						     int id, u32 __user * data)
{
	switch (id) {

	case RADEON_EMIT_PP_MISC:
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
		    &data[(RADEON_RB3D_DEPTHOFFSET - RADEON_PP_MISC) / 4])) {
			DRM_ERROR("Invalid depth buffer offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case RADEON_EMIT_PP_CNTL:
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
		    &data[(RADEON_RB3D_COLOROFFSET - RADEON_PP_CNTL) / 4])) {
			DRM_ERROR("Invalid colour buffer offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case R200_EMIT_PP_TXOFFSET_0:
	case R200_EMIT_PP_TXOFFSET_1:
	case R200_EMIT_PP_TXOFFSET_2:
	case R200_EMIT_PP_TXOFFSET_3:
	case R200_EMIT_PP_TXOFFSET_4:
	case R200_EMIT_PP_TXOFFSET_5:
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &data[0])) {
			DRM_ERROR("Invalid R200 texture offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case RADEON_EMIT_PP_TXFILTER_0:
	case RADEON_EMIT_PP_TXFILTER_1:
	case RADEON_EMIT_PP_TXFILTER_2:
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
		    &data[(RADEON_PP_TXOFFSET_0 - RADEON_PP_TXFILTER_0) / 4])) {
			DRM_ERROR("Invalid R100 texture offset\n");
			return DRM_ERR(EINVAL);
		}
		break;

	case R200_EMIT_PP_CUBIC_OFFSETS_0:
	case R200_EMIT_PP_CUBIC_OFFSETS_1:
	case R200_EMIT_PP_CUBIC_OFFSETS_2:
	case R200_EMIT_PP_CUBIC_OFFSETS_3:
	case R200_EMIT_PP_CUBIC_OFFSETS_4:
	case R200_EMIT_PP_CUBIC_OFFSETS_5:{
			int i;
			for (i = 0; i < 5; i++) {
				if (radeon_check_and_fixup_offset(dev_priv,
								  filp_priv,
								  &data[i])) {
					DRM_ERROR
					    ("Invalid R200 cubic texture offset\n");
					return DRM_ERR(EINVAL);
				}
			}
			break;
		}

	case RADEON_EMIT_PP_CUBIC_OFFSETS_T0:
	case RADEON_EMIT_PP_CUBIC_OFFSETS_T1:
	case RADEON_EMIT_PP_CUBIC_OFFSETS_T2:{
			int i;
			for (i = 0; i < 5; i++) {
				if (radeon_check_and_fixup_offset(dev_priv,
								  filp_priv,
								  &data[i])) {
					DRM_ERROR
					    ("Invalid R100 cubic texture offset\n");
					return DRM_ERR(EINVAL);
				}
			}
		}
		break;

	case RADEON_EMIT_RB3D_COLORPITCH:
	case RADEON_EMIT_RE_LINE_PATTERN:
	case RADEON_EMIT_SE_LINE_WIDTH:
	case RADEON_EMIT_PP_LUM_MATRIX:
	case RADEON_EMIT_PP_ROT_MATRIX_0:
	case RADEON_EMIT_RB3D_STENCILREFMASK:
	case RADEON_EMIT_SE_VPORT_XSCALE:
	case RADEON_EMIT_SE_CNTL:
	case RADEON_EMIT_SE_CNTL_STATUS:
	case RADEON_EMIT_RE_MISC:
	case RADEON_EMIT_PP_BORDER_COLOR_0:
	case RADEON_EMIT_PP_BORDER_COLOR_1:
	case RADEON_EMIT_PP_BORDER_COLOR_2:
	case RADEON_EMIT_SE_ZBIAS_FACTOR:
	case RADEON_EMIT_SE_TCL_OUTPUT_VTX_FMT:
	case RADEON_EMIT_SE_TCL_MATERIAL_EMMISSIVE_RED:
	case R200_EMIT_PP_TXCBLEND_0:
	case R200_EMIT_PP_TXCBLEND_1:
	case R200_EMIT_PP_TXCBLEND_2:
	case R200_EMIT_PP_TXCBLEND_3:
	case R200_EMIT_PP_TXCBLEND_4:
	case R200_EMIT_PP_TXCBLEND_5:
	case R200_EMIT_PP_TXCBLEND_6:
	case R200_EMIT_PP_TXCBLEND_7:
	case R200_EMIT_TCL_LIGHT_MODEL_CTL_0:
	case R200_EMIT_TFACTOR_0:
	case R200_EMIT_VTX_FMT_0:
	case R200_EMIT_VAP_CTL:
	case R200_EMIT_MATRIX_SELECT_0:
	case R200_EMIT_TEX_PROC_CTL_2:
	case R200_EMIT_TCL_UCP_VERT_BLEND_CTL:
	case R200_EMIT_PP_TXFILTER_0:
	case R200_EMIT_PP_TXFILTER_1:
	case R200_EMIT_PP_TXFILTER_2:
	case R200_EMIT_PP_TXFILTER_3:
	case R200_EMIT_PP_TXFILTER_4:
	case R200_EMIT_PP_TXFILTER_5:
	case R200_EMIT_VTE_CNTL:
	case R200_EMIT_OUTPUT_VTX_COMP_SEL:
	case R200_EMIT_PP_TAM_DEBUG3:
	case R200_EMIT_PP_CNTL_X:
	case R200_EMIT_RB3D_DEPTHXY_OFFSET:
	case R200_EMIT_RE_AUX_SCISSOR_CNTL:
	case R200_EMIT_RE_SCISSOR_TL_0:
	case R200_EMIT_RE_SCISSOR_TL_1:
	case R200_EMIT_RE_SCISSOR_TL_2:
	case R200_EMIT_SE_VAP_CNTL_STATUS:
	case R200_EMIT_SE_VTX_STATE_CNTL:
	case R200_EMIT_RE_POINTSIZE:
	case R200_EMIT_TCL_INPUT_VTX_VECTOR_ADDR_0:
	case R200_EMIT_PP_CUBIC_FACES_0:
	case R200_EMIT_PP_CUBIC_FACES_1:
	case R200_EMIT_PP_CUBIC_FACES_2:
	case R200_EMIT_PP_CUBIC_FACES_3:
	case R200_EMIT_PP_CUBIC_FACES_4:
	case R200_EMIT_PP_CUBIC_FACES_5:
	case RADEON_EMIT_PP_TEX_SIZE_0:
	case RADEON_EMIT_PP_TEX_SIZE_1:
	case RADEON_EMIT_PP_TEX_SIZE_2:
	case R200_EMIT_RB3D_BLENDCOLOR:
	case R200_EMIT_TCL_POINT_SPRITE_CNTL:
	case RADEON_EMIT_PP_CUBIC_FACES_0:
	case RADEON_EMIT_PP_CUBIC_FACES_1:
	case RADEON_EMIT_PP_CUBIC_FACES_2:
	case R200_EMIT_PP_TRI_PERF_CNTL:
		/* These packets don't contain memory offsets */
		break;

	default:
		DRM_ERROR("Unknown state packet ID %d\n", id);
		return DRM_ERR(EINVAL);
	}

	return 0;
}
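
/* The array indices used above come straight from register layout: a
 * state packet's payload mirrors a contiguous register range starting
 * at the packet's base register, so (RADEON_REG - BASE_REG) / 4 is the
 * dword index of RADEON_REG's value within the payload.  That is how
 * the depth, colour and texture offsets are located inside PP_MISC,
 * PP_CNTL and PP_TXFILTER_* packets without any per-packet tables.
 */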

static __inline__ int radeon_check_and_fixup_packet3(drm_radeon_private_t *
						     dev_priv,
						     drm_file_t * filp_priv,
						     drm_radeon_cmd_buffer_t *
						     cmdbuf,
						     unsigned int *cmdsz)
{
	u32 *cmd = (u32 *) cmdbuf->buf;

	*cmdsz = 2 + ((cmd[0] & RADEON_CP_PACKET_COUNT_MASK) >> 16);

	if ((cmd[0] & 0xc0000000) != RADEON_CP_PACKET3) {
		DRM_ERROR("Not a type 3 packet\n");
		return DRM_ERR(EINVAL);
	}

	if (4 * *cmdsz > cmdbuf->bufsz) {
		DRM_ERROR("Packet size larger than size of data provided\n");
		return DRM_ERR(EINVAL);
	}

	/* Check client state and fix it up if necessary */
	if (cmd[0] & 0x8000) {	/* MSB of opcode: next DWORD GUI_CNTL */
		u32 offset;

		if (cmd[1] & (RADEON_GMC_SRC_PITCH_OFFSET_CNTL
			      | RADEON_GMC_DST_PITCH_OFFSET_CNTL)) {
			offset = cmd[2] << 10;
			if (radeon_check_and_fixup_offset
			    (dev_priv, filp_priv, &offset)) {
				DRM_ERROR("Invalid first packet offset\n");
				return DRM_ERR(EINVAL);
			}
			cmd[2] = (cmd[2] & 0xffc00000) | offset >> 10;
		}

		if ((cmd[1] & RADEON_GMC_SRC_PITCH_OFFSET_CNTL) &&
		    (cmd[1] & RADEON_GMC_DST_PITCH_OFFSET_CNTL)) {
			offset = cmd[3] << 10;
			if (radeon_check_and_fixup_offset
			    (dev_priv, filp_priv, &offset)) {
				DRM_ERROR("Invalid second packet offset\n");
				return DRM_ERR(EINVAL);
			}
			cmd[3] = (cmd[3] & 0xffc00000) | offset >> 10;
		}
	}

	return 0;
}
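
/* Layout of the type-3 packet header validated above: bits 31:30 hold
 * the packet type (11b), bits 29:16 hold the payload length minus one
 * (hence the "2 +" when computing *cmdsz in dwords), and the opcode
 * sits below that, with its MSB (bit 15) flagging a GUI control dword.
 * Pitch/offset dwords carry a byte offset shifted right by 10 bits, so
 * the fixup shifts left by 10 to validate the real address, then masks
 * the repaired offset back into the low 22 bits.
 */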

/* ================================================================
 * CP hardware state programming functions
 */

static __inline__ void radeon_emit_clip_rect(drm_radeon_private_t * dev_priv,
					     drm_clip_rect_t * box)
{
	RING_LOCALS;

	DRM_DEBUG("   box:  x1=%d y1=%d  x2=%d y2=%d\n",
		  box->x1, box->y1, box->x2, box->y2);

	BEGIN_RING(4);
	OUT_RING(CP_PACKET0(RADEON_RE_TOP_LEFT, 0));
	OUT_RING((box->y1 << 16) | box->x1);
	OUT_RING(CP_PACKET0(RADEON_RE_WIDTH_HEIGHT, 0));
	OUT_RING(((box->y2 - 1) << 16) | (box->x2 - 1));
	ADVANCE_RING();
}
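
/* The "- 1" above converts the exclusive x2/y2 of drm_clip_rect_t into
 * inclusive coordinates; despite its name, RADEON_RE_WIDTH_HEIGHT seems
 * to be programmed here with the bottom-right corner of the scissor
 * rather than with a width/height pair.
 */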

/* Emit 1.1 state
 */
static int radeon_emit_state(drm_radeon_private_t * dev_priv,
			     drm_file_t * filp_priv,
			     drm_radeon_context_regs_t * ctx,
			     drm_radeon_texture_regs_t * tex,
			     unsigned int dirty)
{
	RING_LOCALS;
	DRM_DEBUG("dirty=0x%08x\n", dirty);

	if (dirty & RADEON_UPLOAD_CONTEXT) {
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &ctx->rb3d_depthoffset)) {
			DRM_ERROR("Invalid depth buffer offset\n");
			return DRM_ERR(EINVAL);
		}

		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &ctx->rb3d_coloroffset)) {
			DRM_ERROR("Invalid colour buffer offset\n");
			return DRM_ERR(EINVAL);
		}

		BEGIN_RING(14);
		OUT_RING(CP_PACKET0(RADEON_PP_MISC, 6));
		OUT_RING(ctx->pp_misc);
		OUT_RING(ctx->pp_fog_color);
		OUT_RING(ctx->re_solid_color);
		OUT_RING(ctx->rb3d_blendcntl);
		OUT_RING(ctx->rb3d_depthoffset);
		OUT_RING(ctx->rb3d_depthpitch);
		OUT_RING(ctx->rb3d_zstencilcntl);
		OUT_RING(CP_PACKET0(RADEON_PP_CNTL, 2));
		OUT_RING(ctx->pp_cntl);
		OUT_RING(ctx->rb3d_cntl);
		OUT_RING(ctx->rb3d_coloroffset);
		OUT_RING(CP_PACKET0(RADEON_RB3D_COLORPITCH, 0));
		OUT_RING(ctx->rb3d_colorpitch);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_VERTFMT) {
		BEGIN_RING(2);
		OUT_RING(CP_PACKET0(RADEON_SE_COORD_FMT, 0));
		OUT_RING(ctx->se_coord_fmt);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_LINE) {
		BEGIN_RING(5);
		OUT_RING(CP_PACKET0(RADEON_RE_LINE_PATTERN, 1));
		OUT_RING(ctx->re_line_pattern);
		OUT_RING(ctx->re_line_state);
		OUT_RING(CP_PACKET0(RADEON_SE_LINE_WIDTH, 0));
		OUT_RING(ctx->se_line_width);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_BUMPMAP) {
		BEGIN_RING(5);
		OUT_RING(CP_PACKET0(RADEON_PP_LUM_MATRIX, 0));
		OUT_RING(ctx->pp_lum_matrix);
		OUT_RING(CP_PACKET0(RADEON_PP_ROT_MATRIX_0, 1));
		OUT_RING(ctx->pp_rot_matrix_0);
		OUT_RING(ctx->pp_rot_matrix_1);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_MASKS) {
		BEGIN_RING(4);
		OUT_RING(CP_PACKET0(RADEON_RB3D_STENCILREFMASK, 2));
		OUT_RING(ctx->rb3d_stencilrefmask);
		OUT_RING(ctx->rb3d_ropcntl);
		OUT_RING(ctx->rb3d_planemask);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_VIEWPORT) {
		BEGIN_RING(7);
		OUT_RING(CP_PACKET0(RADEON_SE_VPORT_XSCALE, 5));
		OUT_RING(ctx->se_vport_xscale);
		OUT_RING(ctx->se_vport_xoffset);
		OUT_RING(ctx->se_vport_yscale);
		OUT_RING(ctx->se_vport_yoffset);
		OUT_RING(ctx->se_vport_zscale);
		OUT_RING(ctx->se_vport_zoffset);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_SETUP) {
		BEGIN_RING(4);
		OUT_RING(CP_PACKET0(RADEON_SE_CNTL, 0));
		OUT_RING(ctx->se_cntl);
		OUT_RING(CP_PACKET0(RADEON_SE_CNTL_STATUS, 0));
		OUT_RING(ctx->se_cntl_status);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_MISC) {
		BEGIN_RING(2);
		OUT_RING(CP_PACKET0(RADEON_RE_MISC, 0));
		OUT_RING(ctx->re_misc);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_TEX0) {
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &tex[0].pp_txoffset)) {
			DRM_ERROR("Invalid texture offset for unit 0\n");
			return DRM_ERR(EINVAL);
		}

		BEGIN_RING(9);
		OUT_RING(CP_PACKET0(RADEON_PP_TXFILTER_0, 5));
		OUT_RING(tex[0].pp_txfilter);
		OUT_RING(tex[0].pp_txformat);
		OUT_RING(tex[0].pp_txoffset);
		OUT_RING(tex[0].pp_txcblend);
		OUT_RING(tex[0].pp_txablend);
		OUT_RING(tex[0].pp_tfactor);
		OUT_RING(CP_PACKET0(RADEON_PP_BORDER_COLOR_0, 0));
		OUT_RING(tex[0].pp_border_color);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_TEX1) {
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &tex[1].pp_txoffset)) {
			DRM_ERROR("Invalid texture offset for unit 1\n");
			return DRM_ERR(EINVAL);
		}

		BEGIN_RING(9);
		OUT_RING(CP_PACKET0(RADEON_PP_TXFILTER_1, 5));
		OUT_RING(tex[1].pp_txfilter);
		OUT_RING(tex[1].pp_txformat);
		OUT_RING(tex[1].pp_txoffset);
		OUT_RING(tex[1].pp_txcblend);
		OUT_RING(tex[1].pp_txablend);
		OUT_RING(tex[1].pp_tfactor);
		OUT_RING(CP_PACKET0(RADEON_PP_BORDER_COLOR_1, 0));
		OUT_RING(tex[1].pp_border_color);
		ADVANCE_RING();
	}

	if (dirty & RADEON_UPLOAD_TEX2) {
		if (radeon_check_and_fixup_offset(dev_priv, filp_priv,
						  &tex[2].pp_txoffset)) {
			DRM_ERROR("Invalid texture offset for unit 2\n");
			return DRM_ERR(EINVAL);
		}

		BEGIN_RING(9);
		OUT_RING(CP_PACKET0(RADEON_PP_TXFILTER_2, 5));
		OUT_RING(tex[2].pp_txfilter);
		OUT_RING(tex[2].pp_txformat);
		OUT_RING(tex[2].pp_txoffset);
		OUT_RING(tex[2].pp_txcblend);
		OUT_RING(tex[2].pp_txablend);
		OUT_RING(tex[2].pp_tfactor);
		OUT_RING(CP_PACKET0(RADEON_PP_BORDER_COLOR_2, 0));
		OUT_RING(tex[2].pp_border_color);
		ADVANCE_RING();
	}

	return 0;
}
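
/* Every framebuffer/GART address emitted above (depth, colour and
 * texture offsets) has passed through radeon_check_and_fixup_offset()
 * first, so a 1.1-state client cannot point the rendering engines at
 * memory outside the apertures the DRM manages.
 */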

/* Emit 1.2 state
 */
static int radeon_emit_state2(drm_radeon_private_t * dev_priv,
			      drm_file_t * filp_priv,
			      drm_radeon_state_t * state)
{
	RING_LOCALS;

	if (state->dirty & RADEON_UPLOAD_ZBIAS) {
		BEGIN_RING(3);
		OUT_RING(CP_PACKET0(RADEON_SE_ZBIAS_FACTOR, 1));
		OUT_RING(state->context2.se_zbias_factor);
		OUT_RING(state->context2.se_zbias_constant);
		ADVANCE_RING();
	}

	return radeon_emit_state(dev_priv, filp_priv, &state->context,
				 state->tex, state->dirty);
}

/* New (1.3) state mechanism.  3 commands (packet, scalar, vector) in
 * 1.3 cmdbuffers allow all previous state to be updated as well as
 * the tcl scalar and vector areas.
 */
static struct {
	int start;
	int len;
	const char *name;
} packet[RADEON_MAX_STATE_PACKETS] = {
	{RADEON_PP_MISC, 7, "RADEON_PP_MISC"},
	{RADEON_PP_CNTL, 3, "RADEON_PP_CNTL"},
	{RADEON_RB3D_COLORPITCH, 1, "RADEON_RB3D_COLORPITCH"},
	{RADEON_RE_LINE_PATTERN, 2, "RADEON_RE_LINE_PATTERN"},
	{RADEON_SE_LINE_WIDTH, 1, "RADEON_SE_LINE_WIDTH"},
	{RADEON_PP_LUM_MATRIX, 1, "RADEON_PP_LUM_MATRIX"},
	{RADEON_PP_ROT_MATRIX_0, 2, "RADEON_PP_ROT_MATRIX_0"},
	{RADEON_RB3D_STENCILREFMASK, 3, "RADEON_RB3D_STENCILREFMASK"},
	{RADEON_SE_VPORT_XSCALE, 6, "RADEON_SE_VPORT_XSCALE"},
	{RADEON_SE_CNTL, 2, "RADEON_SE_CNTL"},
	{RADEON_SE_CNTL_STATUS, 1, "RADEON_SE_CNTL_STATUS"},
	{RADEON_RE_MISC, 1, "RADEON_RE_MISC"},
	{RADEON_PP_TXFILTER_0, 6, "RADEON_PP_TXFILTER_0"},
	{RADEON_PP_BORDER_COLOR_0, 1, "RADEON_PP_BORDER_COLOR_0"},
	{RADEON_PP_TXFILTER_1, 6, "RADEON_PP_TXFILTER_1"},
	{RADEON_PP_BORDER_COLOR_1, 1, "RADEON_PP_BORDER_COLOR_1"},
	{RADEON_PP_TXFILTER_2, 6, "RADEON_PP_TXFILTER_2"},
	{RADEON_PP_BORDER_COLOR_2, 1, "RADEON_PP_BORDER_COLOR_2"},
	{RADEON_SE_ZBIAS_FACTOR, 2, "RADEON_SE_ZBIAS_FACTOR"},
	{RADEON_SE_TCL_OUTPUT_VTX_FMT, 11, "RADEON_SE_TCL_OUTPUT_VTX_FMT"},
	{RADEON_SE_TCL_MATERIAL_EMMISSIVE_RED, 17,
	 "RADEON_SE_TCL_MATERIAL_EMMISSIVE_RED"},
	{R200_PP_TXCBLEND_0, 4, "R200_PP_TXCBLEND_0"},
	{R200_PP_TXCBLEND_1, 4, "R200_PP_TXCBLEND_1"},
	{R200_PP_TXCBLEND_2, 4, "R200_PP_TXCBLEND_2"},
	{R200_PP_TXCBLEND_3, 4, "R200_PP_TXCBLEND_3"},
	{R200_PP_TXCBLEND_4, 4, "R200_PP_TXCBLEND_4"},
	{R200_PP_TXCBLEND_5, 4, "R200_PP_TXCBLEND_5"},
	{R200_PP_TXCBLEND_6, 4, "R200_PP_TXCBLEND_6"},
	{R200_PP_TXCBLEND_7, 4, "R200_PP_TXCBLEND_7"},
	{R200_SE_TCL_LIGHT_MODEL_CTL_0, 6, "R200_SE_TCL_LIGHT_MODEL_CTL_0"},
	{R200_PP_TFACTOR_0, 6, "R200_PP_TFACTOR_0"},
	{R200_SE_VTX_FMT_0, 4, "R200_SE_VTX_FMT_0"},
	{R200_SE_VAP_CNTL, 1, "R200_SE_VAP_CNTL"},
	{R200_SE_TCL_MATRIX_SEL_0, 5, "R200_SE_TCL_MATRIX_SEL_0"},
	{R200_SE_TCL_TEX_PROC_CTL_2, 5, "R200_SE_TCL_TEX_PROC_CTL_2"},
	{R200_SE_TCL_UCP_VERT_BLEND_CTL, 1, "R200_SE_TCL_UCP_VERT_BLEND_CTL"},
	{R200_PP_TXFILTER_0, 6, "R200_PP_TXFILTER_0"},
	{R200_PP_TXFILTER_1, 6, "R200_PP_TXFILTER_1"},
	{R200_PP_TXFILTER_2, 6, "R200_PP_TXFILTER_2"},
	{R200_PP_TXFILTER_3, 6, "R200_PP_TXFILTER_3"},
	{R200_PP_TXFILTER_4, 6, "R200_PP_TXFILTER_4"},
	{R200_PP_TXFILTER_5, 6, "R200_PP_TXFILTER_5"},
	{R200_PP_TXOFFSET_0, 1, "R200_PP_TXOFFSET_0"},
	{R200_PP_TXOFFSET_1, 1, "R200_PP_TXOFFSET_1"},
	{R200_PP_TXOFFSET_2, 1, "R200_PP_TXOFFSET_2"},
	{R200_PP_TXOFFSET_3, 1, "R200_PP_TXOFFSET_3"},
	{R200_PP_TXOFFSET_4, 1, "R200_PP_TXOFFSET_4"},
	{R200_PP_TXOFFSET_5, 1, "R200_PP_TXOFFSET_5"},
	{R200_SE_VTE_CNTL, 1, "R200_SE_VTE_CNTL"},
	{R200_SE_TCL_OUTPUT_VTX_COMP_SEL, 1,
	 "R200_SE_TCL_OUTPUT_VTX_COMP_SEL"},
	{R200_PP_TAM_DEBUG3, 1, "R200_PP_TAM_DEBUG3"},
	{R200_PP_CNTL_X, 1, "R200_PP_CNTL_X"},
	{R200_RB3D_DEPTHXY_OFFSET, 1, "R200_RB3D_DEPTHXY_OFFSET"},
	{R200_RE_AUX_SCISSOR_CNTL, 1, "R200_RE_AUX_SCISSOR_CNTL"},
	{R200_RE_SCISSOR_TL_0, 2, "R200_RE_SCISSOR_TL_0"},
	{R200_RE_SCISSOR_TL_1, 2, "R200_RE_SCISSOR_TL_1"},
	{R200_RE_SCISSOR_TL_2, 2, "R200_RE_SCISSOR_TL_2"},
	{R200_SE_VAP_CNTL_STATUS, 1, "R200_SE_VAP_CNTL_STATUS"},
	{R200_SE_VTX_STATE_CNTL, 1, "R200_SE_VTX_STATE_CNTL"},
	{R200_RE_POINTSIZE, 1, "R200_RE_POINTSIZE"},
	{R200_SE_TCL_INPUT_VTX_VECTOR_ADDR_0, 4,
	 "R200_SE_TCL_INPUT_VTX_VECTOR_ADDR_0"},
	{R200_PP_CUBIC_FACES_0, 1, "R200_PP_CUBIC_FACES_0"},	/* 61 */
	{R200_PP_CUBIC_OFFSET_F1_0, 5, "R200_PP_CUBIC_OFFSET_F1_0"},	/* 62 */
	{R200_PP_CUBIC_FACES_1, 1, "R200_PP_CUBIC_FACES_1"},
	{R200_PP_CUBIC_OFFSET_F1_1, 5, "R200_PP_CUBIC_OFFSET_F1_1"},
	{R200_PP_CUBIC_FACES_2, 1, "R200_PP_CUBIC_FACES_2"},
	{R200_PP_CUBIC_OFFSET_F1_2, 5, "R200_PP_CUBIC_OFFSET_F1_2"},
	{R200_PP_CUBIC_FACES_3, 1, "R200_PP_CUBIC_FACES_3"},
	{R200_PP_CUBIC_OFFSET_F1_3, 5, "R200_PP_CUBIC_OFFSET_F1_3"},
	{R200_PP_CUBIC_FACES_4, 1, "R200_PP_CUBIC_FACES_4"},
	{R200_PP_CUBIC_OFFSET_F1_4, 5, "R200_PP_CUBIC_OFFSET_F1_4"},
	{R200_PP_CUBIC_FACES_5, 1, "R200_PP_CUBIC_FACES_5"},
	{R200_PP_CUBIC_OFFSET_F1_5, 5, "R200_PP_CUBIC_OFFSET_F1_5"},
	{RADEON_PP_TEX_SIZE_0, 2, "RADEON_PP_TEX_SIZE_0"},
	{RADEON_PP_TEX_SIZE_1, 2, "RADEON_PP_TEX_SIZE_1"},
	{RADEON_PP_TEX_SIZE_2, 2, "RADEON_PP_TEX_SIZE_2"},
	{R200_RB3D_BLENDCOLOR, 3, "R200_RB3D_BLENDCOLOR"},
	{R200_SE_TCL_POINT_SPRITE_CNTL, 1, "R200_SE_TCL_POINT_SPRITE_CNTL"},
	{RADEON_PP_CUBIC_FACES_0, 1, "RADEON_PP_CUBIC_FACES_0"},
	{RADEON_PP_CUBIC_OFFSET_T0_0, 5, "RADEON_PP_CUBIC_OFFSET_T0_0"},
	{RADEON_PP_CUBIC_FACES_1, 1, "RADEON_PP_CUBIC_FACES_1"},
	{RADEON_PP_CUBIC_OFFSET_T1_0, 5, "RADEON_PP_CUBIC_OFFSET_T1_0"},
	{RADEON_PP_CUBIC_FACES_2, 1, "RADEON_PP_CUBIC_FACES_2"},
	{RADEON_PP_CUBIC_OFFSET_T2_0, 5, "RADEON_PP_CUBIC_OFFSET_T2_0"},
	{R200_PP_TRI_PERF, 2, "R200_PP_TRI_PERF"},
};
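
/* This table is indexed by the RADEON_EMIT_* / R200_EMIT_* packet ids
 * (the "61", "62" markers above track that numbering), so its entries
 * must stay in exactly the order of those defines.  Each entry names
 * the first register of a state block, the number of dwords the client
 * supplies for it, and a string for error reporting; the 1.3 cmdbuffer
 * path uses it to bounds-check and emit client state.
 */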

/* ================================================================
 * Performance monitoring functions
 */

static void radeon_clear_box(drm_radeon_private_t * dev_priv,
			     int x, int y, int w, int h, int r, int g, int b)
{
	u32 color;
	RING_LOCALS;

	x += dev_priv->sarea_priv->boxes[0].x1;
	y += dev_priv->sarea_priv->boxes[0].y1;

	switch (dev_priv->color_fmt) {
	case RADEON_COLOR_FORMAT_RGB565:
		color = (((r & 0xf8) << 8) |
			 ((g & 0xfc) << 3) | ((b & 0xf8) >> 3));
		break;
	case RADEON_COLOR_FORMAT_ARGB8888:
	default:
		color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
		break;
	}
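
	/* The RGB565 case above keeps the top 5/6/5 bits of each 8-bit
	 * channel: red lands in bits 15:11, green in 10:5, blue in 4:0.
	 */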

	BEGIN_RING(4);
	RADEON_WAIT_UNTIL_3D_IDLE();
	OUT_RING(CP_PACKET0(RADEON_DP_WRITE_MASK, 0));
	OUT_RING(0xffffffff);
	ADVANCE_RING();

	BEGIN_RING(6);

	OUT_RING(CP_PACKET3(RADEON_CNTL_PAINT_MULTI, 4));
	OUT_RING(RADEON_GMC_DST_PITCH_OFFSET_CNTL |
		 RADEON_GMC_BRUSH_SOLID_COLOR |
		 (dev_priv->color_fmt << 8) |
		 RADEON_GMC_SRC_DATATYPE_COLOR |
		 RADEON_ROP3_P | RADEON_GMC_CLR_CMP_CNTL_DIS);

	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		OUT_RING(dev_priv->front_pitch_offset);
	} else {
		OUT_RING(dev_priv->back_pitch_offset);
	}

	OUT_RING(color);

	OUT_RING((x << 16) | y);
	OUT_RING((w << 16) | h);

	ADVANCE_RING();
}

static void radeon_cp_performance_boxes(drm_radeon_private_t * dev_priv)
{
	/* Collapse various things into a wait flag -- trying to
	 * guess if userspace slept -- better just to have them tell us.
	 */
	if (dev_priv->stats.last_frame_reads > 1 ||
	    dev_priv->stats.last_clear_reads > dev_priv->stats.clears) {
		dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
	}

	if (dev_priv->stats.freelist_loops) {
		dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
	}

	/* Purple box for page flipping
	 */
	if (dev_priv->stats.boxes & RADEON_BOX_FLIP)
		radeon_clear_box(dev_priv, 4, 4, 8, 8, 255, 0, 255);

	/* Red box if we have to wait for idle at any point
	 */
	if (dev_priv->stats.boxes & RADEON_BOX_WAIT_IDLE)
		radeon_clear_box(dev_priv, 16, 4, 8, 8, 255, 0, 0);

	/* Blue box: lost context?
	 */

	/* Yellow box for texture swaps
	 */
	if (dev_priv->stats.boxes & RADEON_BOX_TEXTURE_LOAD)
		radeon_clear_box(dev_priv, 40, 4, 8, 8, 255, 255, 0);

	/* Green box if hardware never idles (as far as we can tell)
	 */
	if (!(dev_priv->stats.boxes & RADEON_BOX_DMA_IDLE))
		radeon_clear_box(dev_priv, 64, 4, 8, 8, 0, 255, 0);

	/* Draw bars indicating number of buffers allocated
	 * (not a great measure, easily confused)
	 */
	if (dev_priv->stats.requested_bufs) {
		if (dev_priv->stats.requested_bufs > 100)
			dev_priv->stats.requested_bufs = 100;

		radeon_clear_box(dev_priv, 4, 16,
				 dev_priv->stats.requested_bufs, 4,
				 196, 128, 128);
	}

	memset(&dev_priv->stats, 0, sizeof(dev_priv->stats));
}

/* ================================================================
 * CP command dispatch functions
 */

static void radeon_cp_dispatch_clear(drm_device_t * dev,
				     drm_radeon_clear_t * clear,
				     drm_radeon_clear_rect_t * depth_boxes)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_depth_clear_t *depth_clear = &dev_priv->depth_clear;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	u32 rb3d_cntl = 0, rb3d_stencilrefmask = 0;
	int i;
	RING_LOCALS;
	DRM_DEBUG("flags = 0x%x\n", flags);

	dev_priv->stats.clears++;

	if (dev_priv->page_flipping && dev_priv->current_page == 1) {
		unsigned int tmp = flags;

		flags &= ~(RADEON_FRONT | RADEON_BACK);
		if (tmp & RADEON_FRONT)
			flags |= RADEON_BACK;
		if (tmp & RADEON_BACK)
			flags |= RADEON_FRONT;
	}

	if (flags & (RADEON_FRONT | RADEON_BACK)) {

		BEGIN_RING(4);

		/* Ensure the 3D stream is idle before doing a
		 * 2D fill to clear the front or back buffer.
		 */
		RADEON_WAIT_UNTIL_3D_IDLE();

		OUT_RING(CP_PACKET0(RADEON_DP_WRITE_MASK, 0));
		OUT_RING(clear->color_mask);

		ADVANCE_RING();

		/* Make sure we restore the 3D state next time.
		 */
		dev_priv->sarea_priv->ctx_owner = 0;

		for (i = 0; i < nbox; i++) {
			int x = pbox[i].x1;
			int y = pbox[i].y1;
			int w = pbox[i].x2 - x;
			int h = pbox[i].y2 - y;

			DRM_DEBUG("dispatch clear %d,%d-%d,%d flags 0x%x\n",
				  x, y, w, h, flags);

			if (flags & RADEON_FRONT) {
				BEGIN_RING(6);

				OUT_RING(CP_PACKET3
					 (RADEON_CNTL_PAINT_MULTI, 4));
				OUT_RING(RADEON_GMC_DST_PITCH_OFFSET_CNTL |
					 RADEON_GMC_BRUSH_SOLID_COLOR |
					 (dev_priv->color_fmt << 8) |
					 RADEON_GMC_SRC_DATATYPE_COLOR |
					 RADEON_ROP3_P |
					 RADEON_GMC_CLR_CMP_CNTL_DIS);

				OUT_RING(dev_priv->front_pitch_offset);
				OUT_RING(clear->clear_color);

				OUT_RING((x << 16) | y);
				OUT_RING((w << 16) | h);

				ADVANCE_RING();
			}

			if (flags & RADEON_BACK) {
				BEGIN_RING(6);

				OUT_RING(CP_PACKET3
					 (RADEON_CNTL_PAINT_MULTI, 4));
				OUT_RING(RADEON_GMC_DST_PITCH_OFFSET_CNTL |
					 RADEON_GMC_BRUSH_SOLID_COLOR |
					 (dev_priv->color_fmt << 8) |
					 RADEON_GMC_SRC_DATATYPE_COLOR |
					 RADEON_ROP3_P |
					 RADEON_GMC_CLR_CMP_CNTL_DIS);

				OUT_RING(dev_priv->back_pitch_offset);
				OUT_RING(clear->clear_color);

				OUT_RING((x << 16) | y);
				OUT_RING((w << 16) | h);

				ADVANCE_RING();
			}
		}
	}

	/* hyper z clear */
	/* no docs available, based on reverse engineering by Stephane Marchesin */
	if ((flags & (RADEON_DEPTH | RADEON_STENCIL)) &&
	    (flags & RADEON_CLEAR_FASTZ)) {

		int i;
		int depthpixperline =
		    dev_priv->depth_fmt == RADEON_DEPTH_FORMAT_16BIT_INT_Z ?
		    (dev_priv->depth_pitch / 2) : (dev_priv->depth_pitch / 4);
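
		/* depth_pitch appears to be in bytes here: 16-bit integer Z
		 * uses two bytes per pixel and the other depth formats four,
		 * so the division yields z-pixels per scanline, which the
		 * tile arithmetic below depends on.
		 */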

		u32 clearmask;

		u32 tempRB3D_DEPTHCLEARVALUE = clear->clear_depth |
		    ((clear->depth_mask & 0xff) << 24);

		/* Make sure we restore the 3D state next time.
		 * we haven't touched any "normal" state - still need this?
		 */
		dev_priv->sarea_priv->ctx_owner = 0;

		if ((dev_priv->flags & CHIP_HAS_HIERZ) &&
		    (flags & RADEON_USE_HIERZ)) {
			/* FIXME : reverse engineer that for Rx00 cards */
			/* FIXME : the mask supposedly contains low-res z
			   values.  So can't set just to the max (0xff? or
			   actually 0x3fff?), need to take the z clear value
			   into account? */
			/* This pattern seems to work for r100, though we get
			   slight rendering errors with glxgears.  If hierz is
			   not enabled for r100, only the 4 bits which indicate
			   clear (15,16,31,32, all zero) matter, the other ones
			   are ignored, and the same clear mask can be used.
			   That's very different behaviour than R200, which
			   needs a different clear mask and a different number
			   of tiles to clear depending on whether hierz is
			   enabled or not !?! */
			clearmask = (0xff << 22) | (0xff << 6) | 0x003f003f;
		} else {
			/* clear mask : chooses the clearing pattern.
			   rv250: could be used to clear only parts of
			   macrotiles (but that would get really
			   complicated...)?  Bits 0 and 1 (either or both of
			   them ?!?!) are used to not clear tile 0 (or maybe
			   one of the bits indicates if the tile is compressed
			   or not), bits 2 and 3 to not clear tile 1,...
			   Pattern is as follows:
			        | 0,1 | 4,5 | 8,9 |12,13|16,17|20,21|24,25|28,29|
			   bits -------------------------------------------------
			        | 2,3 | 6,7 |10,11|14,15|18,19|22,23|26,27|30,31|
			   rv100: clearmask covers 2x8 4x1 tiles, but one clear
			   still covers 256 pixels ?!? */
			clearmask = 0x0;
		}

		BEGIN_RING(8);
		RADEON_WAIT_UNTIL_2D_IDLE();
		OUT_RING_REG(RADEON_RB3D_DEPTHCLEARVALUE,
			     tempRB3D_DEPTHCLEARVALUE);
		/* what offset is this exactly ? */
		OUT_RING_REG(RADEON_RB3D_ZMASKOFFSET, 0);
		/* need ctlstat, otherwise get some strange black flickering */
		OUT_RING_REG(RADEON_RB3D_ZCACHE_CTLSTAT,
			     RADEON_RB3D_ZC_FLUSH_ALL);
		ADVANCE_RING();

		for (i = 0; i < nbox; i++) {
			int tileoffset, nrtilesx, nrtilesy, j;
			/* it looks like r200 needs rv-style clears, at least
			   if hierz is not enabled? */
			if ((dev_priv->flags & CHIP_HAS_HIERZ) &&
			    !(dev_priv->microcode_version == UCODE_R200)) {
				/* FIXME : figure this out for r200 (when hierz
				   is enabled).  Or maybe r200 actually doesn't
				   need to put the low-res z value into the
				   tile cache like r100, but just needs to
				   clear the hi-level z-buffer?  Works for
				   R100, both with hierz and without.
				   R100 seems to operate on 2x1 8x8 tiles,
				   but... odd: offset/nrtiles need to be 64 pix
				   (4 block) aligned?  Potentially problematic
				   with resolutions which are not 64 pix
				   aligned? */
				tileoffset =
				    ((pbox[i].y1 >> 3) * depthpixperline +
				     pbox[i].x1) >> 6;
				nrtilesx =
				    ((pbox[i].x2 & ~63) -
				     (pbox[i].x1 & ~63)) >> 4;
				nrtilesy =
				    (pbox[i].y2 >> 3) - (pbox[i].y1 >> 3);
				for (j = 0; j <= nrtilesy; j++) {
					BEGIN_RING(4);
					OUT_RING(CP_PACKET3
						 (RADEON_3D_CLEAR_ZMASK, 2));
					/* first tile */
					OUT_RING(tileoffset * 8);
					/* the number of tiles to clear */
					OUT_RING(nrtilesx + 4);
					/* clear mask : chooses the clearing pattern. */
					OUT_RING(clearmask);
					ADVANCE_RING();
					tileoffset += depthpixperline >> 6;
				}
			} else if (dev_priv->microcode_version == UCODE_R200) {
				/* works for rv250. */
				/* find first macro tile (8x2 4x4 z-pixels on rv250) */
				tileoffset =
				    ((pbox[i].y1 >> 3) * depthpixperline +
				     pbox[i].x1) >> 5;
				nrtilesx =
				    (pbox[i].x2 >> 5) - (pbox[i].x1 >> 5);
				nrtilesy =
				    (pbox[i].y2 >> 3) - (pbox[i].y1 >> 3);
				for (j = 0; j <= nrtilesy; j++) {
					BEGIN_RING(4);
					OUT_RING(CP_PACKET3
						 (RADEON_3D_CLEAR_ZMASK, 2));
					/* first tile */
					/* judging by the first tile offset
					   needed, could possibly directly
					   address/clear 4x4 tiles instead of
					   8x2 * 4x4 macro tiles, though we
					   would still need the clear mask for
					   right/bottom if truly 4x4
					   granularity is desired? */
					OUT_RING(tileoffset * 16);
					/* the number of tiles to clear */
					OUT_RING(nrtilesx + 1);
					/* clear mask : chooses the clearing pattern. */
					OUT_RING(clearmask);
					ADVANCE_RING();
					tileoffset += depthpixperline >> 5;
				}
			} else {	/* rv 100 */
				/* rv100 might not need 64 pix alignment, who knows */
				/* offsets are, hmm, weird */
				tileoffset =
				    ((pbox[i].y1 >> 4) * depthpixperline +
				     pbox[i].x1) >> 6;
				nrtilesx =
				    ((pbox[i].x2 & ~63) -
				     (pbox[i].x1 & ~63)) >> 4;
				nrtilesy =
				    (pbox[i].y2 >> 4) - (pbox[i].y1 >> 4);
				for (j = 0; j <= nrtilesy; j++) {
					BEGIN_RING(4);
					OUT_RING(CP_PACKET3
						 (RADEON_3D_CLEAR_ZMASK, 2));
					OUT_RING(tileoffset * 128);
					/* the number of tiles to clear */
					OUT_RING(nrtilesx + 4);
					/* clear mask : chooses the clearing pattern. */
					OUT_RING(clearmask);
					ADVANCE_RING();
					tileoffset += depthpixperline >> 6;
				}
			}
		}

		/* TODO don't always clear all hi-level z tiles */
		if ((dev_priv->flags & CHIP_HAS_HIERZ) &&
		    (dev_priv->microcode_version == UCODE_R200) &&
		    (flags & RADEON_USE_HIERZ))
			/* r100 and cards without hierarchical z-buffer have no
			   high-level z-buffer */
			/* FIXME : the mask supposedly contains low-res z
			   values.  So can't set just to the max (0xff? or
			   actually 0x3fff?), need to take the z clear value
			   into account? */
		{
			BEGIN_RING(4);
			OUT_RING(CP_PACKET3(RADEON_3D_CLEAR_HIZ, 2));
			OUT_RING(0x0);	/* First tile */
			OUT_RING(0x3cc0);
			OUT_RING((0xff << 22) | (0xff << 6) | 0x003f003f);
			ADVANCE_RING();
		}
	}
David Dawesab87c5d2002-02-14 02:00:26 +0000946 /* We have to clear the depth and/or stencil buffers by
947 * rendering a quad into just those buffers. Thus, we have to
948 * make sure the 3D engine is configured correctly.
949 */
Roland Scheideggerc4a87c62004-12-08 16:43:00 +0000950 else if ((dev_priv->microcode_version == UCODE_R200) &&
951 (flags & (RADEON_DEPTH | RADEON_STENCIL))) {
Keith Whitwell48cc3502002-08-26 22:16:18 +0000952
953 int tempPP_CNTL;
954 int tempRE_CNTL;
955 int tempRB3D_CNTL;
956 int tempRB3D_ZSTENCILCNTL;
957 int tempRB3D_STENCILREFMASK;
958 int tempRB3D_PLANEMASK;
959 int tempSE_CNTL;
960 int tempSE_VTE_CNTL;
961 int tempSE_VTX_FMT_0;
962 int tempSE_VTX_FMT_1;
963 int tempSE_VAP_CNTL;
964 int tempRE_AUX_SCISSOR_CNTL;
965
966 tempPP_CNTL = 0;
967 tempRE_CNTL = 0;
968
969 tempRB3D_CNTL = depth_clear->rb3d_cntl;
Keith Whitwell48cc3502002-08-26 22:16:18 +0000970
971 tempRB3D_ZSTENCILCNTL = depth_clear->rb3d_zstencilcntl;
972 tempRB3D_STENCILREFMASK = 0x0;
973
974 tempSE_CNTL = depth_clear->se_cntl;
975
Keith Whitwell48cc3502002-08-26 22:16:18 +0000976 /* Disable TCL */
977
Jon Smirl9f9a8f12004-09-30 21:12:10 +0000978 tempSE_VAP_CNTL = ( /* SE_VAP_CNTL__FORCE_W_TO_ONE_MASK | */
979 (0x9 <<
980 SE_VAP_CNTL__VF_MAX_VTX_NUM__SHIFT));
Keith Whitwell48cc3502002-08-26 22:16:18 +0000981
982 tempRB3D_PLANEMASK = 0x0;
983
984 tempRE_AUX_SCISSOR_CNTL = 0x0;
985
986 tempSE_VTE_CNTL =
Jon Smirl9f9a8f12004-09-30 21:12:10 +0000987 SE_VTE_CNTL__VTX_XY_FMT_MASK | SE_VTE_CNTL__VTX_Z_FMT_MASK;
Keith Whitwell48cc3502002-08-26 22:16:18 +0000988
Jon Smirl9f9a8f12004-09-30 21:12:10 +0000989 /* Vertex format (X, Y, Z, W) */
Keith Whitwell48cc3502002-08-26 22:16:18 +0000990 tempSE_VTX_FMT_0 =
Jon Smirl9f9a8f12004-09-30 21:12:10 +0000991 SE_VTX_FMT_0__VTX_Z0_PRESENT_MASK |
992 SE_VTX_FMT_0__VTX_W0_PRESENT_MASK;
Keith Whitwell48cc3502002-08-26 22:16:18 +0000993 tempSE_VTX_FMT_1 = 0x0;
994
Jon Smirl9f9a8f12004-09-30 21:12:10 +0000995 /*
996 * Depth buffer specific enables
Keith Whitwell48cc3502002-08-26 22:16:18 +0000997 */
998 if (flags & RADEON_DEPTH) {
999 /* Enable depth buffer */
1000 tempRB3D_CNTL |= RADEON_Z_ENABLE;
1001 } else {
1002 /* Disable depth buffer */
1003 tempRB3D_CNTL &= ~RADEON_Z_ENABLE;
1004 }
1005
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001006 /*
Keith Whitwell48cc3502002-08-26 22:16:18 +00001007 * Stencil buffer specific enables
1008 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001009 if (flags & RADEON_STENCIL) {
1010 tempRB3D_CNTL |= RADEON_STENCIL_ENABLE;
1011 tempRB3D_STENCILREFMASK = clear->depth_mask;
Keith Whitwell48cc3502002-08-26 22:16:18 +00001012 } else {
1013 tempRB3D_CNTL &= ~RADEON_STENCIL_ENABLE;
1014 tempRB3D_STENCILREFMASK = 0x00000000;
1015 }
1016
Roland Scheideggerc4a87c62004-12-08 16:43:00 +00001017 if (flags & RADEON_USE_COMP_ZBUF) {
1018 tempRB3D_ZSTENCILCNTL |= RADEON_Z_COMPRESSION_ENABLE |
1019 RADEON_Z_DECOMPRESSION_ENABLE;
1020 }
1021 if (flags & RADEON_USE_HIERZ) {
1022 tempRB3D_ZSTENCILCNTL |= RADEON_Z_HIERARCHY_ENABLE;
1023 }
1024
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001025 BEGIN_RING(26);
Keith Whitwell48cc3502002-08-26 22:16:18 +00001026 RADEON_WAIT_UNTIL_2D_IDLE();
1027
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001028 OUT_RING_REG(RADEON_PP_CNTL, tempPP_CNTL);
1029 OUT_RING_REG(R200_RE_CNTL, tempRE_CNTL);
1030 OUT_RING_REG(RADEON_RB3D_CNTL, tempRB3D_CNTL);
1031 OUT_RING_REG(RADEON_RB3D_ZSTENCILCNTL, tempRB3D_ZSTENCILCNTL);
1032 OUT_RING_REG(RADEON_RB3D_STENCILREFMASK,
1033 tempRB3D_STENCILREFMASK);
1034 OUT_RING_REG(RADEON_RB3D_PLANEMASK, tempRB3D_PLANEMASK);
1035 OUT_RING_REG(RADEON_SE_CNTL, tempSE_CNTL);
1036 OUT_RING_REG(R200_SE_VTE_CNTL, tempSE_VTE_CNTL);
1037 OUT_RING_REG(R200_SE_VTX_FMT_0, tempSE_VTX_FMT_0);
1038 OUT_RING_REG(R200_SE_VTX_FMT_1, tempSE_VTX_FMT_1);
1039 OUT_RING_REG(R200_SE_VAP_CNTL, tempSE_VAP_CNTL);
1040 OUT_RING_REG(R200_RE_AUX_SCISSOR_CNTL, tempRE_AUX_SCISSOR_CNTL);
Keith Whitwell48cc3502002-08-26 22:16:18 +00001041 ADVANCE_RING();
1042
1043 /* Make sure we restore the 3D state next time.
1044 */
1045 dev_priv->sarea_priv->ctx_owner = 0;
1046
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001047 for (i = 0; i < nbox; i++) {
1048
1049 /* Funny that this should be required --
Keith Whitwell48cc3502002-08-26 22:16:18 +00001050 * sets top-left?
1051 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001052 radeon_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);
Keith Whitwell48cc3502002-08-26 22:16:18 +00001053
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001054 BEGIN_RING(14);
1055 OUT_RING(CP_PACKET3(R200_3D_DRAW_IMMD_2, 12));
1056 OUT_RING((RADEON_PRIM_TYPE_RECT_LIST |
1057 RADEON_PRIM_WALK_RING |
1058 (3 << RADEON_NUM_VERTICES_SHIFT)));
1059 OUT_RING(depth_boxes[i].ui[CLEAR_X1]);
1060 OUT_RING(depth_boxes[i].ui[CLEAR_Y1]);
1061 OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
1062 OUT_RING(0x3f800000);
1063 OUT_RING(depth_boxes[i].ui[CLEAR_X1]);
1064 OUT_RING(depth_boxes[i].ui[CLEAR_Y2]);
1065 OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
1066 OUT_RING(0x3f800000);
1067 OUT_RING(depth_boxes[i].ui[CLEAR_X2]);
1068 OUT_RING(depth_boxes[i].ui[CLEAR_Y2]);
1069 OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
1070 OUT_RING(0x3f800000);
Keith Whitwell48cc3502002-08-26 22:16:18 +00001071 ADVANCE_RING();
1072 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001073 } else if ((flags & (RADEON_DEPTH | RADEON_STENCIL))) {
Keith Whitwell48cc3502002-08-26 22:16:18 +00001074
Roland Scheideggerc4a87c62004-12-08 16:43:00 +00001075 int tempRB3D_ZSTENCILCNTL = depth_clear->rb3d_zstencilcntl;
1076
David Dawesab87c5d2002-02-14 02:00:26 +00001077 rb3d_cntl = depth_clear->rb3d_cntl;
1078
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001079 if (flags & RADEON_DEPTH) {
1080 rb3d_cntl |= RADEON_Z_ENABLE;
David Dawesab87c5d2002-02-14 02:00:26 +00001081 } else {
1082 rb3d_cntl &= ~RADEON_Z_ENABLE;
1083 }
1084
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001085 if (flags & RADEON_STENCIL) {
1086 rb3d_cntl |= RADEON_STENCIL_ENABLE;
1087 rb3d_stencilrefmask = clear->depth_mask; /* misnamed field */
David Dawesab87c5d2002-02-14 02:00:26 +00001088 } else {
1089 rb3d_cntl &= ~RADEON_STENCIL_ENABLE;
1090 rb3d_stencilrefmask = 0x00000000;
1091 }
David Dawesab87c5d2002-02-14 02:00:26 +00001092
Roland Scheideggerc4a87c62004-12-08 16:43:00 +00001093 if (flags & RADEON_USE_COMP_ZBUF) {
1094 tempRB3D_ZSTENCILCNTL |= RADEON_Z_COMPRESSION_ENABLE |
1095 RADEON_Z_DECOMPRESSION_ENABLE;
1096 }
1097 if (flags & RADEON_USE_HIERZ) {
1098 tempRB3D_ZSTENCILCNTL |= RADEON_Z_HIERARCHY_ENABLE;
1099 }
1100
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001101 BEGIN_RING(13);
Keith Whitwell48cc3502002-08-26 22:16:18 +00001102 RADEON_WAIT_UNTIL_2D_IDLE();
Kevin E Martin0994e632001-01-05 22:57:55 +00001103
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001104 OUT_RING(CP_PACKET0(RADEON_PP_CNTL, 1));
1105 OUT_RING(0x00000000);
1106 OUT_RING(rb3d_cntl);
1107
Roland Scheideggerc4a87c62004-12-08 16:43:00 +00001108 OUT_RING_REG(RADEON_RB3D_ZSTENCILCNTL, tempRB3D_ZSTENCILCNTL);
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001109 OUT_RING_REG(RADEON_RB3D_STENCILREFMASK, rb3d_stencilrefmask);
1110 OUT_RING_REG(RADEON_RB3D_PLANEMASK, 0x00000000);
1111 OUT_RING_REG(RADEON_SE_CNTL, depth_clear->se_cntl);
Keith Whitwell48cc3502002-08-26 22:16:18 +00001112 ADVANCE_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001113
Keith Whitwell48cc3502002-08-26 22:16:18 +00001114 /* Make sure we restore the 3D state next time.
1115 */
1116 dev_priv->sarea_priv->ctx_owner = 0;
Kevin E Martin0994e632001-01-05 22:57:55 +00001117
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001118 for (i = 0; i < nbox; i++) {
1119
1120 /* Funny that this should be required --
Keith Whitwell48cc3502002-08-26 22:16:18 +00001121 * sets top-left?
Kevin E Martin0994e632001-01-05 22:57:55 +00001122 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001123 radeon_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);
Kevin E Martin0994e632001-01-05 22:57:55 +00001124
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001125 BEGIN_RING(15);
Kevin E Martin0994e632001-01-05 22:57:55 +00001126
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001127 OUT_RING(CP_PACKET3(RADEON_3D_DRAW_IMMD, 13));
1128 OUT_RING(RADEON_VTX_Z_PRESENT |
1129 RADEON_VTX_PKCOLOR_PRESENT);
1130 OUT_RING((RADEON_PRIM_TYPE_RECT_LIST |
1131 RADEON_PRIM_WALK_RING |
1132 RADEON_MAOS_ENABLE |
1133 RADEON_VTX_FMT_RADEON_MODE |
1134 (3 << RADEON_NUM_VERTICES_SHIFT)));
Kevin E Martin0994e632001-01-05 22:57:55 +00001135
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001136 OUT_RING(depth_boxes[i].ui[CLEAR_X1]);
1137 OUT_RING(depth_boxes[i].ui[CLEAR_Y1]);
1138 OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
1139 OUT_RING(0x0);
Keith Whitwell48cc3502002-08-26 22:16:18 +00001140
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001141 OUT_RING(depth_boxes[i].ui[CLEAR_X1]);
1142 OUT_RING(depth_boxes[i].ui[CLEAR_Y2]);
1143 OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
1144 OUT_RING(0x0);
Kevin E Martin0994e632001-01-05 22:57:55 +00001145
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001146 OUT_RING(depth_boxes[i].ui[CLEAR_X2]);
1147 OUT_RING(depth_boxes[i].ui[CLEAR_Y2]);
1148 OUT_RING(depth_boxes[i].ui[CLEAR_DEPTH]);
1149 OUT_RING(0x0);
Kevin E Martin0994e632001-01-05 22:57:55 +00001150
1151 ADVANCE_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001152 }
1153 }
1154
1155 /* Increment the clear counter. The client-side 3D driver must
1156 * wait on this value before performing the clear ioctl. We
1157 * need this because the card's so damned fast...
1158 */
1159 dev_priv->sarea_priv->last_clear++;
1160
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001161 BEGIN_RING(4);
Kevin E Martin0994e632001-01-05 22:57:55 +00001162
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001163 RADEON_CLEAR_AGE(dev_priv->sarea_priv->last_clear);
Kevin E Martin0994e632001-01-05 22:57:55 +00001164 RADEON_WAIT_UNTIL_IDLE();
1165
1166 ADVANCE_RING();
1167}

static void radeon_cp_dispatch_swap(drm_device_t * dev)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG("\n");

	/* Do some trivial performance monitoring...
	 */
	if (dev_priv->do_boxes)
		radeon_cp_performance_boxes(dev_priv);

	/* Wait for the 3D stream to idle before dispatching the bitblt.
	 * This will prevent data corruption between the two streams.
	 */
	BEGIN_RING(2);

	RADEON_WAIT_UNTIL_3D_IDLE();

	ADVANCE_RING();

	for (i = 0; i < nbox; i++) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG("dispatch swap %d,%d-%d,%d\n", x, y, w, h);

		BEGIN_RING(7);

		OUT_RING(CP_PACKET3(RADEON_CNTL_BITBLT_MULTI, 5));
		OUT_RING(RADEON_GMC_SRC_PITCH_OFFSET_CNTL |
			 RADEON_GMC_DST_PITCH_OFFSET_CNTL |
			 RADEON_GMC_BRUSH_NONE |
			 (dev_priv->color_fmt << 8) |
			 RADEON_GMC_SRC_DATATYPE_COLOR |
			 RADEON_ROP3_S |
			 RADEON_DP_SRC_SOURCE_MEMORY |
			 RADEON_GMC_CLR_CMP_CNTL_DIS | RADEON_GMC_WR_MSK_DIS);

		/* Make this work even if front & back are flipped:
		 */
		if (dev_priv->current_page == 0) {
			OUT_RING(dev_priv->back_pitch_offset);
			OUT_RING(dev_priv->front_pitch_offset);
		} else {
			OUT_RING(dev_priv->front_pitch_offset);
			OUT_RING(dev_priv->back_pitch_offset);
		}

		OUT_RING((x << 16) | y);
		OUT_RING((x << 16) | y);
		OUT_RING((w << 16) | h);

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING(4);

	RADEON_FRAME_AGE(dev_priv->sarea_priv->last_frame);
	RADEON_WAIT_UNTIL_2D_IDLE();

	ADVANCE_RING();
}

static void radeon_cp_dispatch_flip(drm_device_t * dev)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_sarea_t *sarea = (drm_sarea_t *) dev_priv->sarea->handle;
	int offset = (dev_priv->current_page == 1)
	    ? dev_priv->front_offset : dev_priv->back_offset;
	RING_LOCALS;
	DRM_DEBUG("%s: page=%d pfCurrentPage=%d\n",
		  __FUNCTION__,
		  dev_priv->current_page, dev_priv->sarea_priv->pfCurrentPage);

	/* Do some trivial performance monitoring...
	 */
	if (dev_priv->do_boxes) {
		dev_priv->stats.boxes |= RADEON_BOX_FLIP;
		radeon_cp_performance_boxes(dev_priv);
	}

	/* Update the frame offsets for both CRTCs
	 */
	BEGIN_RING(6);

	RADEON_WAIT_UNTIL_3D_IDLE();
	OUT_RING_REG(RADEON_CRTC_OFFSET,
		     ((sarea->frame.y * dev_priv->front_pitch +
		       sarea->frame.x * (dev_priv->color_fmt - 2)) & ~7)
		     + offset);
	OUT_RING_REG(RADEON_CRTC2_OFFSET, dev_priv->sarea_priv->crtc2_base
		     + offset);

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;
	dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page =
	    1 - dev_priv->current_page;

	BEGIN_RING(2);

	RADEON_FRAME_AGE(dev_priv->sarea_priv->last_frame);

	ADVANCE_RING();
}
1290
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001291static int bad_prim_vertex_nr(int primitive, int nr)
Keith Whitwell2dcada32002-06-12 15:50:28 +00001292{
1293 switch (primitive & RADEON_PRIM_TYPE_MASK) {
1294 case RADEON_PRIM_TYPE_NONE:
1295 case RADEON_PRIM_TYPE_POINT:
1296 return nr < 1;
1297 case RADEON_PRIM_TYPE_LINE:
1298 return (nr & 1) || nr == 0;
1299 case RADEON_PRIM_TYPE_LINE_STRIP:
1300 return nr < 2;
1301 case RADEON_PRIM_TYPE_TRI_LIST:
1302 case RADEON_PRIM_TYPE_3VRT_POINT_LIST:
1303 case RADEON_PRIM_TYPE_3VRT_LINE_LIST:
1304 case RADEON_PRIM_TYPE_RECT_LIST:
1305 return nr % 3 || nr == 0;
1306 case RADEON_PRIM_TYPE_TRI_FAN:
1307 case RADEON_PRIM_TYPE_TRI_STRIP:
1308 return nr < 3;
1309 default:
1310 return 1;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001311 }
Keith Whitwell2dcada32002-06-12 15:50:28 +00001312}
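/* Example: a RADEON_PRIM_TYPE_TRI_LIST with nr == 7 is rejected
 * (7 % 3 != 0), a RADEON_PRIM_TYPE_LINE needs an even, non-zero
 * vertex count, and any unrecognized primitive type is always
 * rejected. */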
1313
Keith Whitwell2dcada32002-06-12 15:50:28 +00001314typedef struct {
1315 unsigned int start;
1316 unsigned int finish;
1317 unsigned int prim;
1318 unsigned int numverts;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001319 unsigned int offset;
1320 unsigned int vc_format;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001321} drm_radeon_tcl_prim_t;
David Dawesab87c5d2002-02-14 02:00:26 +00001322
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001323static void radeon_cp_dispatch_vertex(drm_device_t * dev,
1324 drm_buf_t * buf,
1325 drm_radeon_tcl_prim_t * prim)
David Dawesab87c5d2002-02-14 02:00:26 +00001326{
1327 drm_radeon_private_t *dev_priv = dev->dev_private;
Keith Whitwell9e7d6172003-06-16 10:40:52 +00001328 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Michel Daenzer062751a2003-08-26 15:44:01 +00001329 int offset = dev_priv->gart_buffers_offset + buf->offset + prim->start;
David Dawesab87c5d2002-02-14 02:00:26 +00001330 int numverts = (int)prim->numverts;
Keith Whitwell9e7d6172003-06-16 10:40:52 +00001331 int nbox = sarea_priv->nbox;
David Dawesab87c5d2002-02-14 02:00:26 +00001332 int i = 0;
1333 RING_LOCALS;
1334
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001335 DRM_DEBUG("hwprim 0x%x vfmt 0x%x %d..%d %d verts\n",
Keith Whitwell2dcada32002-06-12 15:50:28 +00001336 prim->prim,
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001337 prim->vc_format, prim->start, prim->finish, prim->numverts);
David Dawesab87c5d2002-02-14 02:00:26 +00001338
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001339 if (bad_prim_vertex_nr(prim->prim, prim->numverts)) {
1340 DRM_ERROR("bad prim %x numverts %d\n",
1341 prim->prim, prim->numverts);
Keith Whitwellbaef0862002-03-08 16:03:37 +00001342 return;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001343 }
David Dawesab87c5d2002-02-14 02:00:26 +00001344
1345 do {
1346 /* Emit the next cliprect */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001347 if (i < nbox) {
1348 radeon_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);
David Dawesab87c5d2002-02-14 02:00:26 +00001349 }
1350
1351 /* Emit the vertex buffer rendering commands */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001352 BEGIN_RING(5);
David Dawesab87c5d2002-02-14 02:00:26 +00001353
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001354 OUT_RING(CP_PACKET3(RADEON_3D_RNDR_GEN_INDX_PRIM, 3));
1355 OUT_RING(offset);
1356 OUT_RING(numverts);
1357 OUT_RING(prim->vc_format);
1358 OUT_RING(prim->prim | RADEON_PRIM_WALK_LIST |
1359 RADEON_COLOR_ORDER_RGBA |
1360 RADEON_VTX_FMT_RADEON_MODE |
1361 (numverts << RADEON_NUM_VERTICES_SHIFT));
David Dawesab87c5d2002-02-14 02:00:26 +00001362
1363 ADVANCE_RING();
1364
1365 i++;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001366 } while (i < nbox);
David Dawesab87c5d2002-02-14 02:00:26 +00001367}
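/* Note on the dispatch loop above: the same vertex buffer is emitted
 * once per cliprect, with radeon_emit_clip_rect() re-emitting the
 * scissor before each pass; with nbox == 0 it is still emitted once,
 * unclipped. */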
1368
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001369static void radeon_cp_discard_buffer(drm_device_t * dev, drm_buf_t * buf)
Kevin E Martin0994e632001-01-05 22:57:55 +00001370{
1371 drm_radeon_private_t *dev_priv = dev->dev_private;
1372 drm_radeon_buf_priv_t *buf_priv = buf->dev_private;
Kevin E Martin0994e632001-01-05 22:57:55 +00001373 RING_LOCALS;
Kevin E Martin0994e632001-01-05 22:57:55 +00001374
Keith Whitwell2dcada32002-06-12 15:50:28 +00001375 buf_priv->age = ++dev_priv->sarea_priv->last_dispatch;
Kevin E Martin0994e632001-01-05 22:57:55 +00001376
David Dawesab87c5d2002-02-14 02:00:26 +00001377 /* Emit the vertex buffer age */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001378 BEGIN_RING(2);
1379 RADEON_DISPATCH_AGE(buf_priv->age);
David Dawesab87c5d2002-02-14 02:00:26 +00001380 ADVANCE_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001381
David Dawesab87c5d2002-02-14 02:00:26 +00001382 buf->pending = 1;
1383 buf->used = 0;
Kevin E Martin0994e632001-01-05 22:57:55 +00001384}
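/* A discarded buffer is not freed immediately, only aged: the
 * freelist logic (see radeon_freelist_get()) hands it out again once
 * the CP's last-dispatch scratch value catches up with the age
 * emitted above. */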
1385
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001386static void radeon_cp_dispatch_indirect(drm_device_t * dev,
1387 drm_buf_t * buf, int start, int end)
Kevin E Martin0994e632001-01-05 22:57:55 +00001388{
1389 drm_radeon_private_t *dev_priv = dev->dev_private;
Kevin E Martin0994e632001-01-05 22:57:55 +00001390 RING_LOCALS;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001391 DRM_DEBUG("indirect: buf=%d s=0x%x e=0x%x\n", buf->idx, start, end);
Kevin E Martin0994e632001-01-05 22:57:55 +00001392
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001393 if (start != end) {
Michel Daenzer062751a2003-08-26 15:44:01 +00001394 int offset = (dev_priv->gart_buffers_offset
Kevin E Martin0994e632001-01-05 22:57:55 +00001395 + buf->offset + start);
1396 int dwords = (end - start + 3) / sizeof(u32);
1397
1398 /* Indirect buffer data must be an even number of
1399 * dwords, so if we've been given an odd number we must
1400 * pad the data with a Type-2 CP packet.
1401 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001402 if (dwords & 1) {
Kevin E Martin0994e632001-01-05 22:57:55 +00001403 u32 *data = (u32 *)
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001404 ((char *)dev->agp_buffer_map->handle
1405 + buf->offset + start);
Kevin E Martin0994e632001-01-05 22:57:55 +00001406 data[dwords++] = RADEON_CP_PACKET2;
1407 }
1408
Kevin E Martin0994e632001-01-05 22:57:55 +00001409 /* Fire off the indirect buffer */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001410 BEGIN_RING(3);
Kevin E Martin0994e632001-01-05 22:57:55 +00001411
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001412 OUT_RING(CP_PACKET0(RADEON_CP_IB_BASE, 1));
1413 OUT_RING(offset);
1414 OUT_RING(dwords);
Kevin E Martin0994e632001-01-05 22:57:55 +00001415
1416 ADVANCE_RING();
1417 }
Kevin E Martin0994e632001-01-05 22:57:55 +00001418}
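/* Worked example of the padding above: start = 0, end = 28 gives
 * dwords = (28 + 3) / 4 = 7, which is odd, so data[7] is set to
 * RADEON_CP_PACKET2 (a type-2 NOP) and the buffer is fired as 8
 * dwords. */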
1419
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001420static void radeon_cp_dispatch_indices(drm_device_t * dev,
1421 drm_buf_t * elt_buf,
1422 drm_radeon_tcl_prim_t * prim)
Kevin E Martin0994e632001-01-05 22:57:55 +00001423{
1424 drm_radeon_private_t *dev_priv = dev->dev_private;
Keith Whitwell9e7d6172003-06-16 10:40:52 +00001425 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Michel Daenzer062751a2003-08-26 15:44:01 +00001426 int offset = dev_priv->gart_buffers_offset + prim->offset;
Kevin E Martin0994e632001-01-05 22:57:55 +00001427 u32 *data;
1428 int dwords;
1429 int i = 0;
David Dawesab87c5d2002-02-14 02:00:26 +00001430 int start = prim->start + RADEON_INDEX_PRIM_OFFSET;
1431 int count = (prim->finish - start) / sizeof(u16);
Keith Whitwell9e7d6172003-06-16 10:40:52 +00001432 int nbox = sarea_priv->nbox;
Kevin E Martin0994e632001-01-05 22:57:55 +00001433
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001434 DRM_DEBUG("hwprim 0x%x vfmt 0x%x %d..%d offset: %x nr %d\n",
Keith Whitwell2dcada32002-06-12 15:50:28 +00001435 prim->prim,
1436 prim->vc_format,
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001437 prim->start, prim->finish, prim->offset, prim->numverts);
Kevin E Martin0994e632001-01-05 22:57:55 +00001438
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001439 if (bad_prim_vertex_nr(prim->prim, count)) {
1440 DRM_ERROR("bad prim %x count %d\n", prim->prim, count);
Keith Whitwellbaef0862002-03-08 16:03:37 +00001441 return;
Kevin E Martin0994e632001-01-05 22:57:55 +00001442 }
1443
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001444 if (start >= prim->finish || (prim->start & 0x7)) {
1445 DRM_ERROR("buffer prim %d\n", prim->prim);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001446 return;
1447 }
1448
1449 dwords = (prim->finish - prim->start + 3) / sizeof(u32);
1450
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001451 data = (u32 *) ((char *)dev->agp_buffer_map->handle +
1452 elt_buf->offset + prim->start);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001453
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001454 data[0] = CP_PACKET3(RADEON_3D_RNDR_GEN_INDX_PRIM, dwords - 2);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001455 data[1] = offset;
1456 data[2] = prim->numverts;
1457 data[3] = prim->vc_format;
1458 data[4] = (prim->prim |
1459 RADEON_PRIM_WALK_IND |
1460 RADEON_COLOR_ORDER_RGBA |
1461 RADEON_VTX_FMT_RADEON_MODE |
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001462 (count << RADEON_NUM_VERTICES_SHIFT));
Keith Whitwell2dcada32002-06-12 15:50:28 +00001463
1464 do {
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001465 if (i < nbox)
1466 radeon_emit_clip_rect(dev_priv, &sarea_priv->boxes[i]);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001467
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001468 radeon_cp_dispatch_indirect(dev, elt_buf,
1469 prim->start, prim->finish);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001470
1471 i++;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001472 } while (i < nbox);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001473
Kevin E Martin0994e632001-01-05 22:57:55 +00001474}
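/* Layout assumed above: the element buffer starts with the five-dword
 * 3D_RNDR_GEN_INDX_PRIM header patched into data[0..4], followed by
 * the u16 vertex indices at RADEON_INDEX_PRIM_OFFSET; the whole range
 * is then replayed once per cliprect via radeon_cp_dispatch_indirect().
 */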
1475
Eric Anholtab59dd22005-07-20 21:17:47 +00001476#define RADEON_MAX_TEXTURE_SIZE RADEON_BUFFER_SIZE
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001477
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001478static int radeon_cp_dispatch_texture(DRMFILE filp,
1479 drm_device_t * dev,
1480 drm_radeon_texture_t * tex,
1481 drm_radeon_tex_image_t * image)
Kevin E Martin0994e632001-01-05 22:57:55 +00001482{
1483 drm_radeon_private_t *dev_priv = dev->dev_private;
Michel Daenzer2655ccd2003-11-04 00:46:05 +00001484 drm_file_t *filp_priv;
Kevin E Martin0994e632001-01-05 22:57:55 +00001485 drm_buf_t *buf;
Kevin E Martin0994e632001-01-05 22:57:55 +00001486 u32 format;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001487 u32 *buffer;
Dave Airlie02df04d2004-07-25 08:47:38 +00001488 const u8 __user *data;
Eric Anholtab59dd22005-07-20 21:17:47 +00001489 int size, dwords, tex_width, blit_width, spitch;
Michel Daenzerfac2ed42003-02-06 18:20:00 +00001490 u32 height;
Keith Whitwellb03fa552002-12-06 12:22:43 +00001491 int i;
Roland Scheidegger732cdc52005-02-10 19:22:43 +00001492 u32 texpitch, microtile;
Eric Anholtab59dd22005-07-20 21:17:47 +00001493 u32 offset;
Kevin E Martin0994e632001-01-05 22:57:55 +00001494 RING_LOCALS;
Kevin E Martin0994e632001-01-05 22:57:55 +00001495
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001496 DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);
Michel Daenzer2655ccd2003-11-04 00:46:05 +00001497
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001498 if (radeon_check_and_fixup_offset(dev_priv, filp_priv, &tex->offset)) {
1499 DRM_ERROR("Invalid destination offset\n");
1500 return DRM_ERR(EINVAL);
Michel Daenzer2655ccd2003-11-04 00:46:05 +00001501 }
1502
Keith Whitwell48cc3502002-08-26 22:16:18 +00001503 dev_priv->stats.boxes |= RADEON_BOX_TEXTURE_LOAD;
1504
Keith Whitwellb03fa552002-12-06 12:22:43 +00001505 /* Flush the pixel cache. This ensures no pixel data gets mixed
1506 * up with the texture data from the host data blit, otherwise
1507 * part of the texture image may be corrupted.
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001508 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001509 BEGIN_RING(4);
Keith Whitwellb03fa552002-12-06 12:22:43 +00001510 RADEON_FLUSH_CACHE();
1511 RADEON_WAIT_UNTIL_IDLE();
1512 ADVANCE_RING();
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001513
Kevin E Martin0994e632001-01-05 22:57:55 +00001514 /* The compiler won't optimize away a division by a variable,
1515 * even if the only legal values are powers of two. Thus, we'll
1516 * use a shift instead.
1517 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001518 switch (tex->format) {
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001519 case RADEON_TXFORMAT_ARGB8888:
1520 case RADEON_TXFORMAT_RGBA8888:
Kevin E Martin0994e632001-01-05 22:57:55 +00001521 format = RADEON_COLOR_FORMAT_ARGB8888;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001522 tex_width = tex->width * 4;
1523 blit_width = image->width * 4;
Kevin E Martin0994e632001-01-05 22:57:55 +00001524 break;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001525 case RADEON_TXFORMAT_AI88:
1526 case RADEON_TXFORMAT_ARGB1555:
1527 case RADEON_TXFORMAT_RGB565:
1528 case RADEON_TXFORMAT_ARGB4444:
Keith Whitwellf1c8fe92002-09-23 17:26:43 +00001529 case RADEON_TXFORMAT_VYUY422:
1530 case RADEON_TXFORMAT_YVYU422:
Kevin E Martin0994e632001-01-05 22:57:55 +00001531 format = RADEON_COLOR_FORMAT_RGB565;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001532 tex_width = tex->width * 2;
1533 blit_width = image->width * 2;
Kevin E Martin0994e632001-01-05 22:57:55 +00001534 break;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001535 case RADEON_TXFORMAT_I8:
1536 case RADEON_TXFORMAT_RGB332:
Kevin E Martin0994e632001-01-05 22:57:55 +00001537 format = RADEON_COLOR_FORMAT_CI8;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001538 tex_width = tex->width * 1;
1539 blit_width = image->width * 1;
Kevin E Martin0994e632001-01-05 22:57:55 +00001540 break;
1541 default:
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001542 DRM_ERROR("invalid texture format %d\n", tex->format);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001543 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001544 }
Eric Anholtab59dd22005-07-20 21:17:47 +00001545 spitch = blit_width >> 6;
1546 if (spitch == 0 && image->height > 1)
1547 return DRM_ERR(EINVAL);
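	/* spitch is the source pitch in the 64-byte units the blitter
	 * expects; e.g. a blit_width of 256 bytes gives spitch == 4. */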
1548
Roland Scheidegger732cdc52005-02-10 19:22:43 +00001549 texpitch = tex->pitch;
1550 if ((texpitch << 22) & RADEON_DST_TILE_MICRO) {
1551 microtile = 1;
1552 if (tex_width < 64) {
1553 texpitch &= ~(RADEON_DST_TILE_MICRO >> 22);
1554 /* we got tiled coordinates, untile them */
1555 image->x *= 2;
1556 }
1557 } else
1558 microtile = 0;
Kevin E Martin0994e632001-01-05 22:57:55 +00001559
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001560 DRM_DEBUG("tex=%dx%d blit=%d\n", tex_width, tex->height, blit_width);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001561
Keith Whitwellb03fa552002-12-06 12:22:43 +00001562 do {
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001563 DRM_DEBUG("tex: ofs=0x%x p=%d f=%d x=%hd y=%hd w=%hd h=%hd\n",
1564 tex->offset >> 10, tex->pitch, tex->format,
1565 image->x, image->y, image->width, image->height);
Kevin E Martin0994e632001-01-05 22:57:55 +00001566
Michel Daenzerfac2ed42003-02-06 18:20:00 +00001567 /* Make a copy of some parameters in case we have to
Keith Whitwellb03fa552002-12-06 12:22:43 +00001568 * update them for a multi-pass texture blit.
1569 */
Keith Whitwellb03fa552002-12-06 12:22:43 +00001570 height = image->height;
Dave Airlie02df04d2004-07-25 08:47:38 +00001571 data = (const u8 __user *)image->data;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001572
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001573 size = height * blit_width;
1574
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001575 if (size > RADEON_MAX_TEXTURE_SIZE) {
Keith Whitwellb03fa552002-12-06 12:22:43 +00001576 height = RADEON_MAX_TEXTURE_SIZE / blit_width;
1577 size = height * blit_width;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001578 } else if (size < 4 && size > 0) {
Keith Whitwellb03fa552002-12-06 12:22:43 +00001579 size = 4;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001580 } else if (size == 0) {
Keith Whitwellb03fa552002-12-06 12:22:43 +00001581 return 0;
1582 }
1583
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001584 buf = radeon_freelist_get(dev);
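		/* Note: the idle-and-retry fallback below is deliberately
		 * compiled out by the "0 &&"; a failed allocation falls
		 * straight through to the EAGAIN path. */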
1585 if (0 && !buf) {
1586 radeon_do_cp_idle(dev_priv);
1587 buf = radeon_freelist_get(dev);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001588 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001589 if (!buf) {
Keith Whitwellb03fa552002-12-06 12:22:43 +00001590 DRM_DEBUG("radeon_cp_dispatch_texture: EAGAIN\n");
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001591 if (DRM_COPY_TO_USER(tex->image, image, sizeof(*image)))
Dave Airlie7809efc2004-08-30 09:01:50 +00001592 return DRM_ERR(EFAULT);
Keith Whitwellb03fa552002-12-06 12:22:43 +00001593 return DRM_ERR(EAGAIN);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001594 }
Keith Whitwellb03fa552002-12-06 12:22:43 +00001595
Keith Whitwellb03fa552002-12-06 12:22:43 +00001596 /* Dispatch the indirect buffer.
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001597 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001598 buffer =
1599 (u32 *) ((char *)dev->agp_buffer_map->handle + buf->offset);
Keith Whitwellb03fa552002-12-06 12:22:43 +00001600 dwords = size / 4;
Keith Whitwellb03fa552002-12-06 12:22:43 +00001601
Roland Scheidegger732cdc52005-02-10 19:22:43 +00001602 if (microtile) {
1603 /* Texture micro tiling is in use, so the minimum texture width
1604 * is 16 bytes. However, we cannot use the blitter directly for
1605 * texture widths below 64 bytes, since the minimum texture pitch
1606 * is 64 bytes and it must match the texture width, otherwise the
1607 * blitter tiles it incorrectly; tile manually in that case.
1608 * Additionally, tex height == 1 needs special casing, since the
1609 * actual image will have height 2 and we must not read beyond
1610 * the texture size from user space. */
1611 if (tex->height == 1) {
1612 if (tex_width >= 64 || tex_width <= 16) {
1613 if (DRM_COPY_FROM_USER(buffer, data,
1614 tex_width * sizeof(u32))) {
1615 DRM_ERROR("EFAULT on pad, %d bytes\n",
1616 tex_width);
1617 return DRM_ERR(EFAULT);
1618 }
1619 } else if (tex_width == 32) {
1620 if (DRM_COPY_FROM_USER(buffer, data, 16)) {
1621 DRM_ERROR("EFAULT on pad, %d bytes\n",
1622 tex_width);
1623 return DRM_ERR(EFAULT);
1624 }
1625 if (DRM_COPY_FROM_USER(buffer + 8, data + 16, 16)) {
1626 DRM_ERROR("EFAULT on pad, %d bytes\n",
1627 tex_width);
1628 return DRM_ERR(EFAULT);
1629 }
1630 }
1631 } else if (tex_width >= 64 || tex_width == 16) {
1632 if (DRM_COPY_FROM_USER(buffer, data,
1633 dwords * sizeof(u32))) {
1634 DRM_ERROR("EFAULT on data, %d dwords\n",
1635 dwords);
Keith Whitwellb03fa552002-12-06 12:22:43 +00001636 return DRM_ERR(EFAULT);
1637 }
Roland Scheidegger732cdc52005-02-10 19:22:43 +00001638 } else if (tex_width < 16) {
1639 for (i = 0; i < tex->height; i++) {
1640 if (DRM_COPY_FROM_USER(buffer, data, tex_width)) {
1641 DRM_ERROR("EFAULT on pad, %d bytes\n",
1642 tex_width);
1643 return DRM_ERR(EFAULT);
1644 }
1645 buffer += 4;
1646 data += tex_width;
1647 }
1648 } else if (tex_width == 32) {
1649 /* TODO: make sure this works when not fitting in one buffer
1650 (i.e. 32bytes x 2048...) */
1651 for (i = 0; i < tex->height; i += 2) {
1652 if (DRM_COPY_FROM_USER(buffer, data, 16)) {
1653 DRM_ERROR("EFAULT on pad, %d bytes\n",
1654 tex_width);
1655 return DRM_ERR(EFAULT);
1656 }
1657 data += 16;
1658 if (DRM_COPY_FROM_USER(buffer + 8, data, 16)) {
1659 DRM_ERROR("EFAULT on pad, %d bytes\n",
1660 tex_width);
1661 return DRM_ERR(EFAULT);
1662 }
1663 data += 16;
1664 if (DRM_COPY_FROM_USER(buffer + 4, data, 16)) {
1665 DRM_ERROR("EFAULT on pad, %d bytes\n",
1666 tex_width);
1667 return DRM_ERR(EFAULT);
1668 }
1669 data += 16;
1670 if (DRM_COPY_FROM_USER(buffer + 12, data, 16)) {
1671 DRM_ERROR("EFAULT on pad, %d bytes\n",
1672 tex_width);
1673 return DRM_ERR(EFAULT);
1674 }
1675 data += 16;
1676 buffer += 16;
1677 }
1678 }
1679 }
1680 else {
1681 if (tex_width >= 32) {
1682 /* Texture image width is larger than the minimum, so we
1683 * can upload it directly.
1684 */
1685 if (DRM_COPY_FROM_USER(buffer, data,
1686 dwords * sizeof(u32))) {
1687 DRM_ERROR("EFAULT on data, %d dwords\n",
1688 dwords);
1689 return DRM_ERR(EFAULT);
1690 }
1691 } else {
1692 /* Texture image width is less than the minimum, so we
1693 * need to pad out each image scanline to the minimum
1694 * width.
1695 */
1696 for (i = 0; i < tex->height; i++) {
1697 if (DRM_COPY_FROM_USER(buffer, data, tex_width)) {
1698 DRM_ERROR("EFAULT on pad, %d bytes\n",
1699 tex_width);
1700 return DRM_ERR(EFAULT);
1701 }
1702 buffer += 8;
1703 data += tex_width;
1704 }
Keith Whitwellb03fa552002-12-06 12:22:43 +00001705 }
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001706 }
Kevin E Martin0994e632001-01-05 22:57:55 +00001707
Keith Whitwell1728bc62003-03-28 14:27:37 +00001708 buf->filp = filp;
Eric Anholtab59dd22005-07-20 21:17:47 +00001709 buf->used = size;
1710 offset = dev_priv->gart_buffers_offset + buf->offset;
1711 BEGIN_RING(9);
1712 OUT_RING(CP_PACKET3(RADEON_CNTL_BITBLT_MULTI, 5));
1713 OUT_RING(RADEON_GMC_SRC_PITCH_OFFSET_CNTL |
1714 RADEON_GMC_DST_PITCH_OFFSET_CNTL |
1715 RADEON_GMC_BRUSH_NONE |
1716 (format << 8) |
1717 RADEON_GMC_SRC_DATATYPE_COLOR |
1718 RADEON_ROP3_S |
1719 RADEON_DP_SRC_SOURCE_MEMORY |
1720 RADEON_GMC_CLR_CMP_CNTL_DIS |
1721 RADEON_GMC_WR_MSK_DIS);
1722 OUT_RING((spitch << 22) | (offset >> 10));
1723 OUT_RING((texpitch << 22) | (tex->offset >> 10));
1724 OUT_RING(0);
1725 OUT_RING((image->x << 16) | image->y);
1726 OUT_RING((image->width << 16) | height);
1727 RADEON_WAIT_UNTIL_2D_IDLE();
1728 ADVANCE_RING();
1729
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001730 radeon_cp_discard_buffer(dev, buf);
David Dawes0e5b8d72001-03-19 17:45:52 +00001731
Michel Daenzerfac2ed42003-02-06 18:20:00 +00001732 /* Update the input parameters for next time */
1733 image->y += height;
1734 image->height -= height;
Dave Airlie02df04d2004-07-25 08:47:38 +00001735 image->data = (const u8 __user *)image->data + size;
Keith Whitwellb03fa552002-12-06 12:22:43 +00001736 } while (image->height > 0);
Kevin E Martin0994e632001-01-05 22:57:55 +00001737
1738 /* Flush the pixel cache after the blit completes. This ensures
1739 * the texture data is written out to memory before rendering
1740 * continues.
1741 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001742 BEGIN_RING(4);
Kevin E Martin0994e632001-01-05 22:57:55 +00001743 RADEON_FLUSH_CACHE();
1744 RADEON_WAIT_UNTIL_2D_IDLE();
Kevin E Martin0994e632001-01-05 22:57:55 +00001745 ADVANCE_RING();
Keith Whitwellb03fa552002-12-06 12:22:43 +00001746 return 0;
Kevin E Martin0994e632001-01-05 22:57:55 +00001747}
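/* Note the multi-pass contract above: an upload larger than
 * RADEON_MAX_TEXTURE_SIZE is split into passes, image->y, height and
 * data advance in place after each blit, and on EAGAIN the updated
 * image struct is copied back so user space can simply retry the
 * ioctl with the remainder. */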
1748
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001749static void radeon_cp_dispatch_stipple(drm_device_t * dev, u32 * stipple)
Kevin E Martin0994e632001-01-05 22:57:55 +00001750{
1751 drm_radeon_private_t *dev_priv = dev->dev_private;
1752 int i;
1753 RING_LOCALS;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001754 DRM_DEBUG("\n");
Kevin E Martin0994e632001-01-05 22:57:55 +00001755
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001756 BEGIN_RING(35);
Kevin E Martin0994e632001-01-05 22:57:55 +00001757
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001758 OUT_RING(CP_PACKET0(RADEON_RE_STIPPLE_ADDR, 0));
1759 OUT_RING(0x00000000);
Kevin E Martin0994e632001-01-05 22:57:55 +00001760
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001761 OUT_RING(CP_PACKET0_TABLE(RADEON_RE_STIPPLE_DATA, 31));
1762 for (i = 0; i < 32; i++) {
1763 OUT_RING(stipple[i]);
Kevin E Martin0994e632001-01-05 22:57:55 +00001764 }
1765
1766 ADVANCE_RING();
1767}
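/* Example: uploading a solid polygon stipple amounts to passing a
 * 32-entry array of 0xffffffff, which the loop above streams into
 * RADEON_RE_STIPPLE_DATA after resetting RADEON_RE_STIPPLE_ADDR. */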
1768
Roland Scheidegger43c32232005-01-26 17:48:59 +00001769static void radeon_apply_surface_regs(int surf_index, drm_radeon_private_t *dev_priv)
1770{
1771 if (!dev_priv->mmio)
1772 return;
1773
1774 radeon_do_cp_idle(dev_priv);
1775
1776 RADEON_WRITE(RADEON_SURFACE0_INFO + 16*surf_index,
1777 dev_priv->surfaces[surf_index].flags);
1778 RADEON_WRITE(RADEON_SURFACE0_LOWER_BOUND + 16*surf_index,
1779 dev_priv->surfaces[surf_index].lower);
1780 RADEON_WRITE(RADEON_SURFACE0_UPPER_BOUND + 16*surf_index,
1781 dev_priv->surfaces[surf_index].upper);
1782}
1783
1784/* Allocates a virtual surface
1785 * doesn't always allocate a real surface, will stretch an existing
1786 * surface when possible.
1787 *
1788 * Note that refcount can be at most 2, since a free on a surface
1789 * with refcount 3 could force us to allocate a new real surface,
1790 * which might not always be available.
1791 * For example: we allocate three contiguous surfaces ABC. If B is
1792 * freed, we suddenly need two surfaces to store A and C, which might
1793 * not always be available.
1794 */
1795static int alloc_surface(drm_radeon_surface_alloc_t *new, drm_radeon_private_t *dev_priv, DRMFILE filp)
1796{
1797 struct radeon_virt_surface *s;
1798 int i;
1799 int virt_surface_index;
1800 uint32_t new_upper, new_lower;
1801
1802 new_lower = new->address;
1803 new_upper = new_lower + new->size - 1;
1804
1805 /* sanity check */
1806 if ((new_lower >= new_upper) || (new->flags == 0) || (new->size == 0) ||
1807 ((new_upper & RADEON_SURF_ADDRESS_FIXED_MASK) != RADEON_SURF_ADDRESS_FIXED_MASK) ||
1808 ((new_lower & RADEON_SURF_ADDRESS_FIXED_MASK) != 0))
1809 return -1;
1810
1811 /* make sure there is no overlap with existing surfaces */
1812 for (i = 0; i < RADEON_MAX_SURFACES; i++) {
1813 if ((dev_priv->surfaces[i].refcount != 0) &&
1814 (( (new_lower >= dev_priv->surfaces[i].lower) &&
1815 (new_lower < dev_priv->surfaces[i].upper) ) ||
1816 ( (new_lower < dev_priv->surfaces[i].lower) &&
1817 (new_upper > dev_priv->surfaces[i].lower))))
1818 return -1;
1819 }
1820
1821 /* find a virtual surface */
1822 for (i = 0; i < 2*RADEON_MAX_SURFACES; i++)
1823 if (dev_priv->virt_surfaces[i].filp == NULL)
1824 break;
1825 if (i == 2*RADEON_MAX_SURFACES)
1826 return -1;
1827 virt_surface_index = i;
1828
1829 /* try to reuse an existing surface */
1830 for (i = 0; i < RADEON_MAX_SURFACES; i++) {
1831 /* extend before */
1832 if ((dev_priv->surfaces[i].refcount == 1) &&
1833 (new->flags == dev_priv->surfaces[i].flags) &&
1834 (new_upper + 1 == dev_priv->surfaces[i].lower)) {
1835 s = &(dev_priv->virt_surfaces[virt_surface_index]);
1836 s->surface_index = i;
1837 s->lower = new_lower;
1838 s->upper = new_upper;
1839 s->flags = new->flags;
1840 s->filp = filp;
1841 dev_priv->surfaces[i].refcount++;
1842 dev_priv->surfaces[i].lower = s->lower;
1843 radeon_apply_surface_regs(s->surface_index, dev_priv);
1844 return virt_surface_index;
1845 }
1846
1847 /* extend after */
1848 if ((dev_priv->surfaces[i].refcount == 1) &&
1849 (new->flags == dev_priv->surfaces[i].flags) &&
1850 (new_lower == dev_priv->surfaces[i].upper + 1)) {
1851 s = &(dev_priv->virt_surfaces[virt_surface_index]);
1852 s->surface_index = i;
1853 s->lower = new_lower;
1854 s->upper = new_upper;
1855 s->flags = new->flags;
1856 s->filp = filp;
1857 dev_priv->surfaces[i].refcount++;
1858 dev_priv->surfaces[i].upper = s->upper;
1859 radeon_apply_surface_regs(s->surface_index, dev_priv);
1860 return virt_surface_index;
1861 }
1862 }
1863
1864 /* okay, we need a new one */
1865 for (i = 0; i < RADEON_MAX_SURFACES; i++) {
1866 if (dev_priv->surfaces[i].refcount == 0) {
1867 s = &(dev_priv->virt_surfaces[virt_surface_index]);
1868 s->surface_index = i;
1869 s->lower = new_lower;
1870 s->upper = new_upper;
1871 s->flags = new->flags;
1872 s->filp = filp;
1873 dev_priv->surfaces[i].refcount = 1;
1874 dev_priv->surfaces[i].lower = s->lower;
1875 dev_priv->surfaces[i].upper = s->upper;
1876 dev_priv->surfaces[i].flags = s->flags;
1877 radeon_apply_surface_regs(s->surface_index, dev_priv);
1878 return virt_surface_index;
1879 }
1880 }
1881
1882 /* we didn't find anything */
1883 return -1;
1884}
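/* Worked example of the reuse logic above: allocating A and then an
 * adjacent B with identical flags takes the "extend after" branch, so
 * both share one real surface with refcount 2; a third adjacent
 * allocation C cannot extend further (refcount is no longer 1) and
 * claims a fresh real surface. */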
1885
1886static int free_surface(DRMFILE filp, drm_radeon_private_t *dev_priv, int lower)
1887{
1888 struct radeon_virt_surface *s;
1889 int i;
1890 /* find the virtual surface */
1891 for (i = 0; i < 2*RADEON_MAX_SURFACES; i++) {
1892 s = &(dev_priv->virt_surfaces[i]);
1893 if (s->filp) {
1894 if ((lower == s->lower) && (filp == s->filp)) {
1895 if (dev_priv->surfaces[s->surface_index].lower == s->lower)
1896 dev_priv->surfaces[s->surface_index].lower = s->upper;
1897
1898 if (dev_priv->surfaces[s->surface_index].upper == s->upper)
1899 dev_priv->surfaces[s->surface_index].upper = s->lower;
1900
1901 dev_priv->surfaces[s->surface_index].refcount--;
1902 if (dev_priv->surfaces[s->surface_index].refcount == 0)
1903 dev_priv->surfaces[s->surface_index].flags = 0;
1904 s->filp = NULL;
1905 radeon_apply_surface_regs(s->surface_index, dev_priv);
1906 return 0;
1907 }
1908 }
1909 }
1910 return 1;
1911}
1912
1913static void radeon_surfaces_release(DRMFILE filp, drm_radeon_private_t *dev_priv)
1914{
1915 int i;
1916 for (i = 0; i < 2*RADEON_MAX_SURFACES; i++) {
1918 if (dev_priv->virt_surfaces[i].filp == filp)
1919 free_surface(filp, dev_priv, dev_priv->virt_surfaces[i].lower);
1920 }
1921}
1922
Kevin E Martin0994e632001-01-05 22:57:55 +00001923/* ================================================================
1924 * IOCTL functions
1925 */
1926
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00001927static int radeon_surface_alloc(DRM_IOCTL_ARGS)
Roland Scheidegger43c32232005-01-26 17:48:59 +00001928{
1929 DRM_DEVICE;
1930 drm_radeon_private_t *dev_priv = dev->dev_private;
1931 drm_radeon_surface_alloc_t alloc;
1932
1933 if (!dev_priv) {
1934 DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
1935 return DRM_ERR(EINVAL);
1936 }
1937
1938 DRM_COPY_FROM_USER_IOCTL(alloc, (drm_radeon_surface_alloc_t __user *)data,
1939 sizeof(alloc));
1940
1941 if (alloc_surface(&alloc, dev_priv, filp) == -1)
1942 return DRM_ERR(EINVAL);
1943 else
1944 return 0;
1945}
1946
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00001947static int radeon_surface_free(DRM_IOCTL_ARGS)
Roland Scheidegger43c32232005-01-26 17:48:59 +00001948{
1949 DRM_DEVICE;
1950 drm_radeon_private_t *dev_priv = dev->dev_private;
1951 drm_radeon_surface_free_t memfree;
1952
1953 if (!dev_priv) {
1954 DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
1955 return DRM_ERR(EINVAL);
1956 }
1957
Eric Anholted312752005-02-03 01:05:34 +00001958 DRM_COPY_FROM_USER_IOCTL(memfree, (drm_radeon_surface_free_t __user *)data,
Roland Scheidegger43c32232005-01-26 17:48:59 +00001959 sizeof(memfree));
1960
1961 if (free_surface(filp, dev_priv, memfree.address))
1962 return DRM_ERR(EINVAL);
1963 else
1964 return 0;
1965}
1966
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00001967static int radeon_cp_clear(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00001968{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001969 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00001970 drm_radeon_private_t *dev_priv = dev->dev_private;
1971 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
1972 drm_radeon_clear_t clear;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001973 drm_radeon_clear_rect_t depth_boxes[RADEON_NR_SAREA_CLIPRECTS];
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001974 DRM_DEBUG("\n");
Kevin E Martin0994e632001-01-05 22:57:55 +00001975
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001976 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin0994e632001-01-05 22:57:55 +00001977
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001978 DRM_COPY_FROM_USER_IOCTL(clear, (drm_radeon_clear_t __user *) data,
1979 sizeof(clear));
Kevin E Martin0994e632001-01-05 22:57:55 +00001980
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001981 RING_SPACE_TEST_WITH_RETURN(dev_priv);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001982
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001983 if (sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS)
Kevin E Martin0994e632001-01-05 22:57:55 +00001984 sarea_priv->nbox = RADEON_NR_SAREA_CLIPRECTS;
1985
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001986 if (DRM_COPY_FROM_USER(&depth_boxes, clear.depth_boxes,
1987 sarea_priv->nbox * sizeof(depth_boxes[0])))
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001988 return DRM_ERR(EFAULT);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001989
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001990 radeon_cp_dispatch_clear(dev, &clear, depth_boxes);
Kevin E Martin0994e632001-01-05 22:57:55 +00001991
Keith Whitwell2dcada32002-06-12 15:50:28 +00001992 COMMIT_RING();
1993 return 0;
1994}
1995
Keith Whitwell2dcada32002-06-12 15:50:28 +00001996/* Not sure why this isn't set all the time:
Jon Smirl9f9a8f12004-09-30 21:12:10 +00001997 */
1998static int radeon_do_init_pageflip(drm_device_t * dev)
Keith Whitwell2dcada32002-06-12 15:50:28 +00001999{
2000 drm_radeon_private_t *dev_priv = dev->dev_private;
Keith Whitwell24025ca2002-07-04 12:03:15 +00002001 RING_LOCALS;
2002
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002003 DRM_DEBUG("\n");
Keith Whitwell2dcada32002-06-12 15:50:28 +00002004
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002005 BEGIN_RING(6);
Keith Whitwell24025ca2002-07-04 12:03:15 +00002006 RADEON_WAIT_UNTIL_3D_IDLE();
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002007 OUT_RING(CP_PACKET0(RADEON_CRTC_OFFSET_CNTL, 0));
2008 OUT_RING(RADEON_READ(RADEON_CRTC_OFFSET_CNTL) |
2009 RADEON_CRTC_OFFSET_FLIP_CNTL);
2010 OUT_RING(CP_PACKET0(RADEON_CRTC2_OFFSET_CNTL, 0));
2011 OUT_RING(RADEON_READ(RADEON_CRTC2_OFFSET_CNTL) |
2012 RADEON_CRTC_OFFSET_FLIP_CNTL);
Keith Whitwell24025ca2002-07-04 12:03:15 +00002013 ADVANCE_RING();
2014
Keith Whitwell2dcada32002-06-12 15:50:28 +00002015 dev_priv->page_flipping = 1;
2016 dev_priv->current_page = 0;
Keith Whitwellbb91bc02002-06-27 17:56:39 +00002017 dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002018
2019 return 0;
2020}
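/* Presumably the FLIP_CNTL bits set above make subsequent CRTC offset
 * writes latch at vertical blank rather than immediately, so the page
 * flips dispatched later do not tear mid-scanout. */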
2021
Jon Smirlfa6b1d12004-09-27 19:51:38 +00002022/* Called whenever a client dies, from drm_release.
Keith Whitwellf1c8fe92002-09-23 17:26:43 +00002023 * NOTE: Lock isn't necessarily held when this is called!
2024 */
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002025static int radeon_do_cleanup_pageflip(drm_device_t * dev)
Keith Whitwell2dcada32002-06-12 15:50:28 +00002026{
2027 drm_radeon_private_t *dev_priv = dev->dev_private;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002028 DRM_DEBUG("\n");
Keith Whitwell2dcada32002-06-12 15:50:28 +00002029
Keith Whitwell24025ca2002-07-04 12:03:15 +00002030 if (dev_priv->current_page != 0)
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002031 radeon_cp_dispatch_flip(dev);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002032
2033 dev_priv->page_flipping = 0;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002034 return 0;
2035}
2036
2037/* Swapping and flipping are different operations, need different ioctls.
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002038 * They can & should be intermixed to support multiple 3d windows.
Keith Whitwell2dcada32002-06-12 15:50:28 +00002039 */
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002040static int radeon_cp_flip(DRM_IOCTL_ARGS)
Keith Whitwell2dcada32002-06-12 15:50:28 +00002041{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002042 DRM_DEVICE;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002043 drm_radeon_private_t *dev_priv = dev->dev_private;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002044 DRM_DEBUG("\n");
Keith Whitwell2dcada32002-06-12 15:50:28 +00002045
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002046 LOCK_TEST_WITH_RETURN(dev, filp);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002047
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002048 RING_SPACE_TEST_WITH_RETURN(dev_priv);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002049
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002050 if (!dev_priv->page_flipping)
2051 radeon_do_init_pageflip(dev);
2052
2053 radeon_cp_dispatch_flip(dev);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002054
2055 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00002056 return 0;
2057}
2058
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002059static int radeon_cp_swap(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002060{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002061 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00002062 drm_radeon_private_t *dev_priv = dev->dev_private;
2063 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002064 DRM_DEBUG("\n");
Kevin E Martin0994e632001-01-05 22:57:55 +00002065
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002066 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002067
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002068 RING_SPACE_TEST_WITH_RETURN(dev_priv);
Gareth Hughes4d2a4452001-01-24 15:34:46 +00002069
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002070 if (sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002071 sarea_priv->nbox = RADEON_NR_SAREA_CLIPRECTS;
2072
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002073 radeon_cp_dispatch_swap(dev);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002074 dev_priv->sarea_priv->ctx_owner = 0;
Kevin E Martin0994e632001-01-05 22:57:55 +00002075
Keith Whitwell2dcada32002-06-12 15:50:28 +00002076 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00002077 return 0;
2078}
2079
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002080static int radeon_cp_vertex(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002081{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002082 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00002083 drm_radeon_private_t *dev_priv = dev->dev_private;
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002084 drm_file_t *filp_priv;
David Dawesab87c5d2002-02-14 02:00:26 +00002085 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Kevin E Martin0994e632001-01-05 22:57:55 +00002086 drm_device_dma_t *dma = dev->dma;
2087 drm_buf_t *buf;
Kevin E Martin0994e632001-01-05 22:57:55 +00002088 drm_radeon_vertex_t vertex;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002089 drm_radeon_tcl_prim_t prim;
Kevin E Martin0994e632001-01-05 22:57:55 +00002090
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002091 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002092
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002093 if (!dev_priv) {
2094 DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002095 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002096 }
2097
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002098 DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002099
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002100 DRM_COPY_FROM_USER_IOCTL(vertex, (drm_radeon_vertex_t __user *) data,
2101 sizeof(vertex));
Kevin E Martin0994e632001-01-05 22:57:55 +00002102
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002103 DRM_DEBUG("pid=%d index=%d count=%d discard=%d\n",
2104 DRM_CURRENTPID, vertex.idx, vertex.count, vertex.discard);
Kevin E Martin0994e632001-01-05 22:57:55 +00002105
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002106 if (vertex.idx < 0 || vertex.idx >= dma->buf_count) {
2107 DRM_ERROR("buffer index %d (of %d max)\n",
2108 vertex.idx, dma->buf_count - 1);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002109 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002110 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002111 if (vertex.prim < 0 || vertex.prim > RADEON_PRIM_TYPE_3VRT_LINE_LIST) {
2112 DRM_ERROR("buffer prim %d\n", vertex.prim);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002113 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002114 }
2115
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002116 RING_SPACE_TEST_WITH_RETURN(dev_priv);
2117 VB_AGE_TEST_WITH_RETURN(dev_priv);
Kevin E Martin0994e632001-01-05 22:57:55 +00002118
2119 buf = dma->buflist[vertex.idx];
Kevin E Martin0994e632001-01-05 22:57:55 +00002120
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002121 if (buf->filp != filp) {
2122 DRM_ERROR("process %d using buffer owned by %p\n",
2123 DRM_CURRENTPID, buf->filp);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002124 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002125 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002126 if (buf->pending) {
2127 DRM_ERROR("sending pending buffer %d\n", vertex.idx);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002128 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002129 }
2130
Keith Whitwell2dcada32002-06-12 15:50:28 +00002131 /* Build up a prim_t record:
2132 */
Keith Whitwellbaef0862002-03-08 16:03:37 +00002133 if (vertex.count) {
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002134 buf->used = vertex.count; /* not used? */
Keith Whitwell2dcada32002-06-12 15:50:28 +00002135
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002136 if (sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS) {
2137 if (radeon_emit_state(dev_priv, filp_priv,
2138 &sarea_priv->context_state,
2139 sarea_priv->tex_state,
2140 sarea_priv->dirty)) {
2141 DRM_ERROR("radeon_emit_state failed\n");
2142 return DRM_ERR(EINVAL);
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002143 }
2144
Keith Whitwellbaef0862002-03-08 16:03:37 +00002145 sarea_priv->dirty &= ~(RADEON_UPLOAD_TEX0IMAGES |
2146 RADEON_UPLOAD_TEX1IMAGES |
2147 RADEON_UPLOAD_TEX2IMAGES |
2148 RADEON_REQUIRE_QUIESCENCE);
2149 }
David Dawesab87c5d2002-02-14 02:00:26 +00002150
Keith Whitwellbaef0862002-03-08 16:03:37 +00002151 prim.start = 0;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002152 prim.finish = vertex.count; /* unused */
Keith Whitwellbaef0862002-03-08 16:03:37 +00002153 prim.prim = vertex.prim;
Keith Whitwellbaef0862002-03-08 16:03:37 +00002154 prim.numverts = vertex.count;
2155 prim.vc_format = dev_priv->sarea_priv->vc_format;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002156
2157 radeon_cp_dispatch_vertex(dev, buf, &prim);
David Dawesab87c5d2002-02-14 02:00:26 +00002158 }
2159
David Dawesab87c5d2002-02-14 02:00:26 +00002160 if (vertex.discard) {
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002161 radeon_cp_discard_buffer(dev, buf);
David Dawesab87c5d2002-02-14 02:00:26 +00002162 }
Kevin E Martin0994e632001-01-05 22:57:55 +00002163
Keith Whitwell2dcada32002-06-12 15:50:28 +00002164 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00002165 return 0;
2166}
2167
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002168static int radeon_cp_indices(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002169{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002170 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00002171 drm_radeon_private_t *dev_priv = dev->dev_private;
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002172 drm_file_t *filp_priv;
David Dawesab87c5d2002-02-14 02:00:26 +00002173 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Kevin E Martin0994e632001-01-05 22:57:55 +00002174 drm_device_dma_t *dma = dev->dma;
2175 drm_buf_t *buf;
Kevin E Martin0994e632001-01-05 22:57:55 +00002176 drm_radeon_indices_t elts;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002177 drm_radeon_tcl_prim_t prim;
Kevin E Martin0994e632001-01-05 22:57:55 +00002178 int count;
2179
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002180 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002181
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002182 if (!dev_priv) {
2183 DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002184 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002185 }
2186
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002187 DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002188
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002189 DRM_COPY_FROM_USER_IOCTL(elts, (drm_radeon_indices_t __user *) data,
2190 sizeof(elts));
Kevin E Martin0994e632001-01-05 22:57:55 +00002191
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002192 DRM_DEBUG("pid=%d index=%d start=%d end=%d discard=%d\n",
2193 DRM_CURRENTPID, elts.idx, elts.start, elts.end, elts.discard);
Kevin E Martin0994e632001-01-05 22:57:55 +00002194
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002195 if (elts.idx < 0 || elts.idx >= dma->buf_count) {
2196 DRM_ERROR("buffer index %d (of %d max)\n",
2197 elts.idx, dma->buf_count - 1);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002198 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002199 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002200 if (elts.prim < 0 || elts.prim > RADEON_PRIM_TYPE_3VRT_LINE_LIST) {
2201 DRM_ERROR("buffer prim %d\n", elts.prim);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002202 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002203 }
2204
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002205 RING_SPACE_TEST_WITH_RETURN(dev_priv);
2206 VB_AGE_TEST_WITH_RETURN(dev_priv);
Kevin E Martin0994e632001-01-05 22:57:55 +00002207
2208 buf = dma->buflist[elts.idx];
Kevin E Martin0994e632001-01-05 22:57:55 +00002209
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002210 if (buf->filp != filp) {
2211 DRM_ERROR("process %d using buffer owned by %p\n",
2212 DRM_CURRENTPID, buf->filp);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002213 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002214 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002215 if (buf->pending) {
2216 DRM_ERROR("sending pending buffer %d\n", elts.idx);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002217 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002218 }
2219
2220 count = (elts.end - elts.start) / sizeof(u16);
2221 elts.start -= RADEON_INDEX_PRIM_OFFSET;
2222
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002223 if (elts.start & 0x7) {
2224 DRM_ERROR("misaligned buffer 0x%x\n", elts.start);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002225 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002226 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002227 if (elts.start < buf->used) {
2228 DRM_ERROR("no header 0x%x - 0x%x\n", elts.start, buf->used);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002229 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002230 }
2231
2232 buf->used = elts.end;
Kevin E Martin0994e632001-01-05 22:57:55 +00002233
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002234 if (sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS) {
2235 if (radeon_emit_state(dev_priv, filp_priv,
2236 &sarea_priv->context_state,
2237 sarea_priv->tex_state,
2238 sarea_priv->dirty)) {
2239 DRM_ERROR("radeon_emit_state failed\n");
2240 return DRM_ERR(EINVAL);
Michel Daenzer2655ccd2003-11-04 00:46:05 +00002241 }
David Dawesab87c5d2002-02-14 02:00:26 +00002242
2243 sarea_priv->dirty &= ~(RADEON_UPLOAD_TEX0IMAGES |
2244 RADEON_UPLOAD_TEX1IMAGES |
2245 RADEON_UPLOAD_TEX2IMAGES |
2246 RADEON_REQUIRE_QUIESCENCE);
2247 }
2248
David Dawesab87c5d2002-02-14 02:00:26 +00002249 /* Build up a prim_t record:
2250 */
2251 prim.start = elts.start;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002252 prim.finish = elts.end;
David Dawesab87c5d2002-02-14 02:00:26 +00002253 prim.prim = elts.prim;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002254 prim.offset = 0; /* offset from start of dma buffers */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002255 prim.numverts = RADEON_MAX_VB_VERTS; /* duh */
David Dawesab87c5d2002-02-14 02:00:26 +00002256 prim.vc_format = dev_priv->sarea_priv->vc_format;
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002257
2258 radeon_cp_dispatch_indices(dev, buf, &prim);
David Dawesab87c5d2002-02-14 02:00:26 +00002259 if (elts.discard) {
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002260 radeon_cp_discard_buffer(dev, buf);
David Dawesab87c5d2002-02-14 02:00:26 +00002261 }
Kevin E Martin0994e632001-01-05 22:57:55 +00002262
Keith Whitwell2dcada32002-06-12 15:50:28 +00002263 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00002264 return 0;
2265}
2266
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002267static int radeon_cp_texture(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002268{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002269 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00002270 drm_radeon_private_t *dev_priv = dev->dev_private;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002271 drm_radeon_texture_t tex;
2272 drm_radeon_tex_image_t image;
Keith Whitwell2dcada32002-06-12 15:50:28 +00002273 int ret;
Kevin E Martin0994e632001-01-05 22:57:55 +00002274
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002275 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin0994e632001-01-05 22:57:55 +00002276
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002277 DRM_COPY_FROM_USER_IOCTL(tex, (drm_radeon_texture_t __user *) data,
2278 sizeof(tex));
Gareth Hughes3a74d3a2001-03-06 04:37:37 +00002279
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002280 if (tex.image == NULL) {
2281 DRM_ERROR("null texture image!\n");
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002282 return DRM_ERR(EINVAL);
David Dawes0e5b8d72001-03-19 17:45:52 +00002283 }
2284
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002285 if (DRM_COPY_FROM_USER(&image,
2286 (drm_radeon_tex_image_t __user *) tex.image,
2287 sizeof(image)))
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002288 return DRM_ERR(EFAULT);
David Dawes0e5b8d72001-03-19 17:45:52 +00002289
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002290 RING_SPACE_TEST_WITH_RETURN(dev_priv);
2291 VB_AGE_TEST_WITH_RETURN(dev_priv);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002292
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002293 ret = radeon_cp_dispatch_texture(filp, dev, &tex, &image);
Keith Whitwell2dcada32002-06-12 15:50:28 +00002294
2295 COMMIT_RING();
2296 return ret;
Kevin E Martin0994e632001-01-05 22:57:55 +00002297}
2298
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002299static int radeon_cp_stipple(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002300{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002301 DRM_DEVICE;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002302 drm_radeon_private_t *dev_priv = dev->dev_private;
Kevin E Martin0994e632001-01-05 22:57:55 +00002303 drm_radeon_stipple_t stipple;
2304 u32 mask[32];
2305
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002306 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin0994e632001-01-05 22:57:55 +00002307
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002308 DRM_COPY_FROM_USER_IOCTL(stipple, (drm_radeon_stipple_t __user *) data,
2309 sizeof(stipple));
Kevin E Martin0994e632001-01-05 22:57:55 +00002310
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002311 if (DRM_COPY_FROM_USER(&mask, stipple.mask, 32 * sizeof(u32)))
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002312 return DRM_ERR(EFAULT);
Kevin E Martin0994e632001-01-05 22:57:55 +00002313
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002314 RING_SPACE_TEST_WITH_RETURN(dev_priv);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002315
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002316 radeon_cp_dispatch_stipple(dev, mask);
Kevin E Martin0994e632001-01-05 22:57:55 +00002317
Keith Whitwell2dcada32002-06-12 15:50:28 +00002318 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00002319 return 0;
2320}
2321
Dave Airlie0d6b7fc2005-02-01 11:08:31 +00002322static int radeon_cp_indirect(DRM_IOCTL_ARGS)
Kevin E Martin0994e632001-01-05 22:57:55 +00002323{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002324 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00002325 drm_radeon_private_t *dev_priv = dev->dev_private;
2326 drm_device_dma_t *dma = dev->dma;
2327 drm_buf_t *buf;
Kevin E Martin0994e632001-01-05 22:57:55 +00002328 drm_radeon_indirect_t indirect;
2329 RING_LOCALS;
2330
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002331 LOCK_TEST_WITH_RETURN(dev, filp);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00002332
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002333 if (!dev_priv) {
2334 DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002335 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002336 }
2337
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002338 DRM_COPY_FROM_USER_IOCTL(indirect,
2339 (drm_radeon_indirect_t __user *) data,
2340 sizeof(indirect));
Kevin E Martin0994e632001-01-05 22:57:55 +00002341
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002342 DRM_DEBUG("indirect: idx=%d s=%d e=%d d=%d\n",
2343 indirect.idx, indirect.start, indirect.end, indirect.discard);
Kevin E Martin0994e632001-01-05 22:57:55 +00002344
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002345 if (indirect.idx < 0 || indirect.idx >= dma->buf_count) {
2346 DRM_ERROR("buffer index %d (of %d max)\n",
2347 indirect.idx, dma->buf_count - 1);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002348 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002349 }
2350
2351 buf = dma->buflist[indirect.idx];
Kevin E Martin0994e632001-01-05 22:57:55 +00002352
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002353 if (buf->filp != filp) {
2354 DRM_ERROR("process %d using buffer owned by %p\n",
2355 DRM_CURRENTPID, buf->filp);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002356 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002357 }
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002358 if (buf->pending) {
2359 DRM_ERROR("sending pending buffer %d\n", indirect.idx);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002360 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002361 }
2362
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002363 if (indirect.start < buf->used) {
2364 DRM_ERROR("reusing indirect: start=0x%x actual=0x%x\n",
2365 indirect.start, buf->used);
Alan Hourihane74ef13f2002-07-05 08:31:11 +00002366 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00002367 }
2368
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002369 RING_SPACE_TEST_WITH_RETURN(dev_priv);
2370 VB_AGE_TEST_WITH_RETURN(dev_priv);
Kevin E Martin0994e632001-01-05 22:57:55 +00002371
2372 buf->used = indirect.end;
Kevin E Martin0994e632001-01-05 22:57:55 +00002373
2374 /* Wait for the 3D stream to idle before the indirect buffer
2375 * containing 2D acceleration commands is processed.
2376 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002377 BEGIN_RING(2);
Kevin E Martin0994e632001-01-05 22:57:55 +00002378
2379 RADEON_WAIT_UNTIL_3D_IDLE();
2380
2381 ADVANCE_RING();
2382
2383 /* Dispatch the indirect buffer full of commands from the
2384 * X server. This is insecure and is thus only available to
2385 * privileged clients.
2386 */
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002387 radeon_cp_dispatch_indirect(dev, buf, indirect.start, indirect.end);
David Dawesab87c5d2002-02-14 02:00:26 +00002388 if (indirect.discard) {
Jon Smirl9f9a8f12004-09-30 21:12:10 +00002389 radeon_cp_discard_buffer(dev, buf);
David Dawesab87c5d2002-02-14 02:00:26 +00002390 }
2391
Keith Whitwell2dcada32002-06-12 15:50:28 +00002392 COMMIT_RING();
David Dawesab87c5d2002-02-14 02:00:26 +00002393 return 0;
2394}
2395
static int radeon_cp_vertex2(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_file_t *filp_priv;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_radeon_vertex2_t vertex;
	int i;
	unsigned char laststate;

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);

	DRM_COPY_FROM_USER_IOCTL(vertex, (drm_radeon_vertex2_t __user *) data,
				 sizeof(vertex));

	DRM_DEBUG("pid=%d index=%d discard=%d\n",
		  DRM_CURRENTPID, vertex.idx, vertex.discard);

	if (vertex.idx < 0 || vertex.idx >= dma->buf_count) {
		DRM_ERROR("buffer index %d (of %d max)\n",
			  vertex.idx, dma->buf_count - 1);
		return DRM_ERR(EINVAL);
	}

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	buf = dma->buflist[vertex.idx];

	if (buf->filp != filp) {
		DRM_ERROR("process %d using buffer owned by %p\n",
			  DRM_CURRENTPID, buf->filp);
		return DRM_ERR(EINVAL);
	}

	if (buf->pending) {
		DRM_ERROR("sending pending buffer %d\n", vertex.idx);
		return DRM_ERR(EINVAL);
	}

	if (sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS)
		return DRM_ERR(EINVAL);

	for (laststate = 0xff, i = 0; i < vertex.nr_prims; i++) {
		drm_radeon_prim_t prim;
		drm_radeon_tcl_prim_t tclprim;

		if (DRM_COPY_FROM_USER(&prim, &vertex.prim[i], sizeof(prim)))
			return DRM_ERR(EFAULT);

		if (prim.stateidx != laststate) {
			drm_radeon_state_t state;

			if (DRM_COPY_FROM_USER(&state,
					       &vertex.state[prim.stateidx],
					       sizeof(state)))
				return DRM_ERR(EFAULT);

			if (radeon_emit_state2(dev_priv, filp_priv, &state)) {
				DRM_ERROR("radeon_emit_state2 failed\n");
				return DRM_ERR(EINVAL);
			}

			laststate = prim.stateidx;
		}

		tclprim.start = prim.start;
		tclprim.finish = prim.finish;
		tclprim.prim = prim.prim;
		tclprim.vc_format = prim.vc_format;

		if (prim.prim & RADEON_PRIM_WALK_IND) {
			tclprim.offset = prim.numverts * 64;
			/* upper bound; the real count comes from the indices */
			tclprim.numverts = RADEON_MAX_VB_VERTS;

			radeon_cp_dispatch_indices(dev, buf, &tclprim);
		} else {
			tclprim.numverts = prim.numverts;
			tclprim.offset = 0;	/* not used */

			radeon_cp_dispatch_vertex(dev, buf, &tclprim);
		}

		if (sarea_priv->nbox == 1)
			sarea_priv->nbox = 0;
	}

	if (vertex.discard) {
		radeon_cp_discard_buffer(dev, buf);
	}

	COMMIT_RING();
	return 0;
}

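/* A hedged sketch of how a client might fill the argument struct that
 * radeon_cp_vertex2() consumes, touching only the fields the handler
 * actually reads (idx, discard, nr_prims, prim, state); the helper
 * itself is hypothetical:
 */
#if 0				/* documentation sketch, not compiled */
static void radeon_sketch_fill_vertex2(drm_radeon_vertex2_t * v,
				       int buf_idx,
				       drm_radeon_prim_t * prims, int nprims,
				       drm_radeon_state_t * states)
{
	v->idx = buf_idx;	/* DMA buffer owned by this client */
	v->discard = 1;		/* hand the buffer back after dispatch */
	v->nr_prims = nprims;	/* one dispatch per prim, state on change */
	v->prim = prims;
	v->state = states;	/* indexed by prim[i].stateidx */
}
#endif
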
static int radeon_emit_packets(drm_radeon_private_t * dev_priv,
			       drm_file_t * filp_priv,
			       drm_radeon_cmd_header_t header,
			       drm_radeon_cmd_buffer_t * cmdbuf)
{
	int id = (int)header.packet.packet_id;
	int sz, reg;
	int *data = (int *)cmdbuf->buf;
	RING_LOCALS;

	if (id >= RADEON_MAX_STATE_PACKETS)
		return DRM_ERR(EINVAL);

	sz = packet[id].len;
	reg = packet[id].start;

	if (sz * sizeof(int) > cmdbuf->bufsz) {
		DRM_ERROR("Packet size provided larger than data provided\n");
		return DRM_ERR(EINVAL);
	}

	if (radeon_check_and_fixup_packets(dev_priv, filp_priv, id, data)) {
		DRM_ERROR("Packet verification failed\n");
		return DRM_ERR(EINVAL);
	}

	BEGIN_RING(sz + 1);
	OUT_RING(CP_PACKET0(reg, (sz - 1)));
	OUT_RING_TABLE(data, sz);
	ADVANCE_RING();

	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}

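/* The type-0 (register write) packet emitted above is one header dword
 * followed by sz payload dwords.  A hedged sketch of the header
 * encoding behind the CP_PACKET0() macro (the bit layout stated here
 * is an assumption for illustration; the authoritative definition is
 * in radeon_drv.h):
 */
#if 0				/* documentation sketch, not compiled */
static __inline__ u32 radeon_sketch_packet0_header(u32 reg, int count)
{
	/* Assumed layout: bits 31:30 = 0 (packet type 0),
	 * bits 29:16 = count - 1, low bits = dword-aligned register.
	 */
	return (0u << 30) | (((u32) (count - 1)) << 16) | (reg >> 2);
}
#endif
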
static __inline__ int radeon_emit_scalars(drm_radeon_private_t * dev_priv,
					  drm_radeon_cmd_header_t header,
					  drm_radeon_cmd_buffer_t * cmdbuf)
{
	int sz = header.scalars.count;
	int start = header.scalars.offset;
	int stride = header.scalars.stride;
	RING_LOCALS;

	BEGIN_RING(3 + sz);
	OUT_RING(CP_PACKET0(RADEON_SE_TCL_SCALAR_INDX_REG, 0));
	OUT_RING(start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT));
	OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_SCALAR_DATA_REG, sz - 1));
	OUT_RING_TABLE(cmdbuf->buf, sz);
	ADVANCE_RING();
	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}

/* God this is ugly.
 * (The only difference from radeon_emit_scalars() is the 0x100 bias:
 * the offset field in the command header is too narrow to reach the
 * scalar registers above 0xff, so RADEON_CMD_SCALARS2 adds it back.)
 */
static __inline__ int radeon_emit_scalars2(drm_radeon_private_t * dev_priv,
					   drm_radeon_cmd_header_t header,
					   drm_radeon_cmd_buffer_t * cmdbuf)
{
	int sz = header.scalars.count;
	int start = ((unsigned int)header.scalars.offset) + 0x100;
	int stride = header.scalars.stride;
	RING_LOCALS;

	BEGIN_RING(3 + sz);
	OUT_RING(CP_PACKET0(RADEON_SE_TCL_SCALAR_INDX_REG, 0));
	OUT_RING(start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT));
	OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_SCALAR_DATA_REG, sz - 1));
	OUT_RING_TABLE(cmdbuf->buf, sz);
	ADVANCE_RING();
	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}

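/* Userspace is expected to pick RADEON_CMD_SCALARS2 when the target
 * offset doesn't fit the narrow header field.  A hedged sketch of that
 * choice (the 0x100 split mirrors the bias applied above and is the
 * assumption being illustrated):
 */
#if 0				/* documentation sketch, not compiled */
static int radeon_sketch_scalars_cmd_type(unsigned int offset)
{
	return (offset >= 0x100) ? RADEON_CMD_SCALARS2 : RADEON_CMD_SCALARS;
}
#endif
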
static __inline__ int radeon_emit_vectors(drm_radeon_private_t * dev_priv,
					  drm_radeon_cmd_header_t header,
					  drm_radeon_cmd_buffer_t * cmdbuf)
{
	int sz = header.vectors.count;
	int start = header.vectors.offset;
	int stride = header.vectors.stride;
	RING_LOCALS;

	BEGIN_RING(3 + sz);
	OUT_RING(CP_PACKET0(RADEON_SE_TCL_VECTOR_INDX_REG, 0));
	OUT_RING(start | (stride << RADEON_VEC_INDX_OCTWORD_STRIDE_SHIFT));
	OUT_RING(CP_PACKET0_TABLE(RADEON_SE_TCL_VECTOR_DATA_REG, (sz - 1)));
	OUT_RING_TABLE(cmdbuf->buf, sz);
	ADVANCE_RING();

	cmdbuf->buf += sz * sizeof(int);
	cmdbuf->bufsz -= sz * sizeof(int);
	return 0;
}

static int radeon_emit_packet3(drm_device_t * dev,
			       drm_file_t * filp_priv,
			       drm_radeon_cmd_buffer_t * cmdbuf)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	unsigned int cmdsz;
	int ret;
	RING_LOCALS;

	DRM_DEBUG("\n");

	if ((ret = radeon_check_and_fixup_packet3(dev_priv, filp_priv,
						  cmdbuf, &cmdsz))) {
		DRM_ERROR("Packet verification failed\n");
		return ret;
	}

	BEGIN_RING(cmdsz);
	OUT_RING_TABLE(cmdbuf->buf, cmdsz);
	ADVANCE_RING();

	cmdbuf->buf += cmdsz * 4;
	cmdbuf->bufsz -= cmdsz * 4;
	return 0;
}

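/* Type-3 packets carry an opcode rather than a register offset; the
 * header is what radeon_check_and_fixup_packet3() parses to compute
 * cmdsz.  A hedged sketch of the encoding (bit layout is an assumption
 * for illustration; CP_PACKET3() in radeon_drv.h is authoritative):
 */
#if 0				/* documentation sketch, not compiled */
static __inline__ u32 radeon_sketch_packet3_header(u32 opcode, int count)
{
	/* Assumed layout: bits 31:30 = 3 (packet type 3),
	 * bits 29:16 = count - 1, bits 15:8 = opcode.
	 */
	return (3u << 30) | (((u32) (count - 1)) << 16) | (opcode << 8);
}
#endif
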
static int radeon_emit_packet3_cliprect(drm_device_t * dev,
					drm_file_t * filp_priv,
					drm_radeon_cmd_buffer_t * cmdbuf,
					int orig_nbox)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_clip_rect_t box;
	unsigned int cmdsz;
	int ret;
	drm_clip_rect_t __user *boxes = cmdbuf->boxes;
	int i = 0;
	RING_LOCALS;

	DRM_DEBUG("\n");

	if ((ret = radeon_check_and_fixup_packet3(dev_priv, filp_priv,
						  cmdbuf, &cmdsz))) {
		DRM_ERROR("Packet verification failed\n");
		return ret;
	}

	if (!orig_nbox)
		goto out;

	do {
		if (i < cmdbuf->nbox) {
			if (DRM_COPY_FROM_USER(&box, &boxes[i], sizeof(box)))
				return DRM_ERR(EFAULT);
			/* FIXME The second and subsequent times round
			 * this loop, send a WAIT_UNTIL_3D_IDLE before
			 * calling emit_clip_rect(). This fixes a
			 * lockup on fast machines when sending
			 * several cliprects with a cmdbuf, as when
			 * waving a 2D window over a 3D
			 * window. Something in the commands from user
			 * space seems to hang the card when they're
			 * sent several times in a row. That would be
			 * the correct place to fix it but this works
			 * around it until I can figure that out - Tim
			 * Smith */
			if (i) {
				BEGIN_RING(2);
				RADEON_WAIT_UNTIL_3D_IDLE();
				ADVANCE_RING();
			}
			radeon_emit_clip_rect(dev_priv, &box);
		}

		BEGIN_RING(cmdsz);
		OUT_RING_TABLE(cmdbuf->buf, cmdsz);
		ADVANCE_RING();

	} while (++i < cmdbuf->nbox);
	if (cmdbuf->nbox == 1)
		cmdbuf->nbox = 0;

      out:
	cmdbuf->buf += cmdsz * 4;
	cmdbuf->bufsz -= cmdsz * 4;
	return 0;
}

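/* Control flow of the replay loop above, reduced to a sketch: the same
 * cmdsz dwords are re-emitted once per cliprect, each pass preceded by
 * the scissor setup (and, per the workaround, a 3D-idle wait on every
 * pass but the first).  Illustrative only:
 */
#if 0				/* documentation sketch, not compiled */
	for (i = 0; i < cmdbuf->nbox; i++) {
		if (i)
			radeon_emit_wait(dev, RADEON_WAIT_3D);	/* lockup workaround */
		radeon_emit_clip_rect(dev_priv, &box);	/* scissor for box i */
		/* ...re-emit the packet's cmdsz dwords... */
	}
#endif
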
static int radeon_emit_wait(drm_device_t * dev, int flags)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;

	DRM_DEBUG("%s: %x\n", __FUNCTION__, flags);
	switch (flags) {
	case RADEON_WAIT_2D:
		BEGIN_RING(2);
		RADEON_WAIT_UNTIL_2D_IDLE();
		ADVANCE_RING();
		break;
	case RADEON_WAIT_3D:
		BEGIN_RING(2);
		RADEON_WAIT_UNTIL_3D_IDLE();
		ADVANCE_RING();
		break;
	case RADEON_WAIT_2D | RADEON_WAIT_3D:
		BEGIN_RING(2);
		RADEON_WAIT_UNTIL_IDLE();
		ADVANCE_RING();
		break;
	default:
		return DRM_ERR(EINVAL);
	}

	return 0;
}

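/* A hedged sketch of the client-side header that reaches
 * radeon_emit_wait() via RADEON_CMD_WAIT in the cmdbuf stream (the
 * exact layout of the wait sub-header is an assumption; the flag
 * names match the switch above):
 */
#if 0				/* documentation sketch, not compiled */
static drm_radeon_cmd_header_t radeon_sketch_wait_header(void)
{
	drm_radeon_cmd_header_t header;

	header.i = 0;
	header.wait.cmd_type = RADEON_CMD_WAIT;
	header.wait.flags = RADEON_WAIT_2D | RADEON_WAIT_3D;	/* full idle */
	return header;
}
#endif
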
static int radeon_cp_cmdbuf(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_file_t *filp_priv;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf = NULL;
	int idx;
	drm_radeon_cmd_buffer_t cmdbuf;
	drm_radeon_cmd_header_t header;
	int orig_nbox, orig_bufsz;
	char *kbuf = NULL;

	LOCK_TEST_WITH_RETURN(dev, filp);

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);

	DRM_COPY_FROM_USER_IOCTL(cmdbuf,
				 (drm_radeon_cmd_buffer_t __user *) data,
				 sizeof(cmdbuf));

	RING_SPACE_TEST_WITH_RETURN(dev_priv);
	VB_AGE_TEST_WITH_RETURN(dev_priv);

	if (cmdbuf.bufsz > 64 * 1024 || cmdbuf.bufsz < 0) {
		return DRM_ERR(EINVAL);
	}

	/* Allocate an in-kernel area and copy in the cmdbuf. Do this to avoid
	 * races between checking values and using those values in other code,
	 * and simply to avoid a lot of function calls to copy in data.
	 */
	orig_bufsz = cmdbuf.bufsz;
	if (orig_bufsz != 0) {
		kbuf = drm_alloc(cmdbuf.bufsz, DRM_MEM_DRIVER);
		if (kbuf == NULL)
			return DRM_ERR(ENOMEM);
		if (DRM_COPY_FROM_USER(kbuf, cmdbuf.buf, cmdbuf.bufsz)) {
			drm_free(kbuf, orig_bufsz, DRM_MEM_DRIVER);
			return DRM_ERR(EFAULT);
		}
		cmdbuf.buf = kbuf;
	}

	orig_nbox = cmdbuf.nbox;

	if (dev_priv->microcode_version == UCODE_R300) {
		int temp;
		temp = r300_do_cp_cmdbuf(dev, filp, filp_priv, &cmdbuf);

		if (orig_bufsz != 0)
			drm_free(kbuf, orig_bufsz, DRM_MEM_DRIVER);

		return temp;
	}

	/* microcode_version != r300 */
	while (cmdbuf.bufsz >= sizeof(header)) {
		header.i = *(int *)cmdbuf.buf;
		cmdbuf.buf += sizeof(header);
		cmdbuf.bufsz -= sizeof(header);

		switch (header.header.cmd_type) {
		case RADEON_CMD_PACKET:
			DRM_DEBUG("RADEON_CMD_PACKET\n");
			if (radeon_emit_packets
			    (dev_priv, filp_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_packets failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_SCALARS:
			DRM_DEBUG("RADEON_CMD_SCALARS\n");
			if (radeon_emit_scalars(dev_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_scalars failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_VECTORS:
			DRM_DEBUG("RADEON_CMD_VECTORS\n");
			if (radeon_emit_vectors(dev_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_vectors failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_DMA_DISCARD:
			DRM_DEBUG("RADEON_CMD_DMA_DISCARD\n");
			idx = header.dma.buf_idx;
			if (idx < 0 || idx >= dma->buf_count) {
				DRM_ERROR("buffer index %d (of %d max)\n",
					  idx, dma->buf_count - 1);
				goto err;
			}

			buf = dma->buflist[idx];
			if (buf->filp != filp || buf->pending) {
				DRM_ERROR("bad buffer %p %p %d\n",
					  buf->filp, filp, buf->pending);
				goto err;
			}

			radeon_cp_discard_buffer(dev, buf);
			break;

		case RADEON_CMD_PACKET3:
			DRM_DEBUG("RADEON_CMD_PACKET3\n");
			if (radeon_emit_packet3(dev, filp_priv, &cmdbuf)) {
				DRM_ERROR("radeon_emit_packet3 failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_PACKET3_CLIP:
			DRM_DEBUG("RADEON_CMD_PACKET3_CLIP\n");
			if (radeon_emit_packet3_cliprect
			    (dev, filp_priv, &cmdbuf, orig_nbox)) {
				DRM_ERROR("radeon_emit_packet3_clip failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_SCALARS2:
			DRM_DEBUG("RADEON_CMD_SCALARS2\n");
			if (radeon_emit_scalars2(dev_priv, header, &cmdbuf)) {
				DRM_ERROR("radeon_emit_scalars2 failed\n");
				goto err;
			}
			break;

		case RADEON_CMD_WAIT:
			DRM_DEBUG("RADEON_CMD_WAIT\n");
			if (radeon_emit_wait(dev, header.wait.flags)) {
				DRM_ERROR("radeon_emit_wait failed\n");
				goto err;
			}
			break;
		default:
			DRM_ERROR("bad cmd_type %d at %p\n",
				  header.header.cmd_type,
				  cmdbuf.buf - sizeof(header));
			goto err;
		}
	}

	if (orig_bufsz != 0)
		drm_free(kbuf, orig_bufsz, DRM_MEM_DRIVER);
	DRM_DEBUG("DONE\n");
	COMMIT_RING();

	return 0;

      err:
	if (orig_bufsz != 0)
		drm_free(kbuf, orig_bufsz, DRM_MEM_DRIVER);
	return DRM_ERR(EINVAL);
}

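/* The stream parsed by radeon_cp_cmdbuf() is a packed sequence of
 * drm_radeon_cmd_header_t headers, each followed by its payload
 * (packet[id].len dwords for RADEON_CMD_PACKET, raw dwords for the
 * packet3 commands, nothing for discard/wait).  A hedged sketch of a
 * client appending one state packet (the helper and the buffer layout
 * are assumptions for illustration):
 */
#if 0				/* documentation sketch, not compiled */
static char *radeon_sketch_append_packet(char *buf, int packet_id,
					 const int *values, int count)
{
	drm_radeon_cmd_header_t header;

	header.i = 0;
	header.packet.cmd_type = RADEON_CMD_PACKET;
	header.packet.packet_id = packet_id;	/* index into packet[] */

	memcpy(buf, &header, sizeof(header));
	buf += sizeof(header);
	memcpy(buf, values, count * sizeof(int));
	return buf + count * sizeof(int);
}
#endif
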
static int radeon_cp_getparam(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_getparam_t param;
	int value;

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_COPY_FROM_USER_IOCTL(param, (drm_radeon_getparam_t __user *) data,
				 sizeof(param));

	DRM_DEBUG("pid=%d\n", DRM_CURRENTPID);

	switch (param.param) {
	case RADEON_PARAM_GART_BUFFER_OFFSET:
		value = dev_priv->gart_buffers_offset;
		break;
	case RADEON_PARAM_LAST_FRAME:
		dev_priv->stats.last_frame_reads++;
		value = GET_SCRATCH(0);
		break;
	case RADEON_PARAM_LAST_DISPATCH:
		value = GET_SCRATCH(1);
		break;
	case RADEON_PARAM_LAST_CLEAR:
		dev_priv->stats.last_clear_reads++;
		value = GET_SCRATCH(2);
		break;
	case RADEON_PARAM_IRQ_NR:
		value = dev->irq;
		break;
	case RADEON_PARAM_GART_BASE:
		value = dev_priv->gart_vm_start;
		break;
	case RADEON_PARAM_REGISTER_HANDLE:
		value = dev_priv->mmio->offset;
		break;
	case RADEON_PARAM_STATUS_HANDLE:
		value = dev_priv->ring_rptr_offset;
		break;
#if BITS_PER_LONG == 32
		/*
		 * This ioctl() doesn't work on 64-bit platforms because hw_lock is a
		 * pointer which can't fit into an int-sized variable. According to
		 * Michel Dänzer, the ioctl() is only used on embedded platforms, so
		 * not supporting it shouldn't be a problem. If the same functionality
		 * is needed on 64-bit platforms, a new ioctl() would have to be added,
		 * so backwards-compatibility for the embedded platforms can be
		 * maintained. --davidm 4-Feb-2004.
		 */
	case RADEON_PARAM_SAREA_HANDLE:
		/* The lock is the first dword in the sarea. */
		value = (long)dev->lock.hw_lock;
		break;
#endif
	case RADEON_PARAM_GART_TEX_HANDLE:
		value = dev_priv->gart_textures_offset;
		break;
	default:
		return DRM_ERR(EINVAL);
	}

	if (DRM_COPY_TO_USER(param.value, &value, sizeof(int))) {
		DRM_ERROR("copy_to_user\n");
		return DRM_ERR(EFAULT);
	}

	return 0;
}

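/* A hedged sketch of the userspace side of this ioctl; the
 * drmCommandWriteRead() wrapper follows the usual libdrm pattern and
 * is an assumption here:
 */
#if 0				/* documentation sketch, not compiled */
static int radeon_sketch_get_gart_base(int fd, int *value)
{
	drm_radeon_getparam_t gp;

	gp.param = RADEON_PARAM_GART_BASE;
	gp.value = value;	/* kernel writes the result through this */
	return drmCommandWriteRead(fd, DRM_RADEON_GETPARAM,
				   &gp, sizeof(gp));
}
#endif
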
static int radeon_cp_setparam(DRM_IOCTL_ARGS)
{
	DRM_DEVICE;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_file_t *filp_priv;
	drm_radeon_setparam_t sp;
	struct drm_radeon_driver_file_fields *radeon_priv;

	if (!dev_priv) {
		DRM_ERROR("%s called with no initialization\n", __FUNCTION__);
		return DRM_ERR(EINVAL);
	}

	DRM_GET_PRIV_WITH_RETURN(filp_priv, filp);

	DRM_COPY_FROM_USER_IOCTL(sp, (drm_radeon_setparam_t __user *) data,
				 sizeof(sp));

	switch (sp.param) {
	case RADEON_SETPARAM_FB_LOCATION:
		radeon_priv = filp_priv->driver_priv;
		radeon_priv->radeon_fb_delta = dev_priv->fb_location - sp.value;
		break;
	case RADEON_SETPARAM_SWITCH_TILING:
		if (sp.value == 0) {
			DRM_DEBUG("color tiling disabled\n");
			dev_priv->front_pitch_offset &= ~RADEON_DST_TILE_MACRO;
			dev_priv->back_pitch_offset &= ~RADEON_DST_TILE_MACRO;
			dev_priv->sarea_priv->tiling_enabled = 0;
		} else if (sp.value == 1) {
			DRM_DEBUG("color tiling enabled\n");
			dev_priv->front_pitch_offset |= RADEON_DST_TILE_MACRO;
			dev_priv->back_pitch_offset |= RADEON_DST_TILE_MACRO;
			dev_priv->sarea_priv->tiling_enabled = 1;
		}
		break;
	default:
		DRM_DEBUG("Invalid parameter %d\n", sp.param);
		return DRM_ERR(EINVAL);
	}

	return 0;
}

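/* A hedged sketch of the matching client call for
 * RADEON_SETPARAM_SWITCH_TILING; the drmCommandWrite() wrapper follows
 * the usual libdrm pattern and is an assumption here:
 */
#if 0				/* documentation sketch, not compiled */
static int radeon_sketch_switch_tiling(int fd, int enable)
{
	drm_radeon_setparam_t sp;

	sp.param = RADEON_SETPARAM_SWITCH_TILING;
	sp.value = enable ? 1 : 0;
	return drmCommandWrite(fd, DRM_RADEON_SETPARAM, &sp, sizeof(sp));
}
#endif
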
/* When a client dies:
 * - Check for and clean up flipped page state
 * - Free any alloced GART memory.
 * - Free any alloced radeon surfaces.
 *
 * DRM infrastructure takes care of reclaiming dma buffers.
 */
void radeon_driver_preclose(drm_device_t * dev, DRMFILE filp)
{
	if (dev->dev_private) {
		drm_radeon_private_t *dev_priv = dev->dev_private;
		if (dev_priv->page_flipping) {
			radeon_do_cleanup_pageflip(dev);
		}
		radeon_mem_release(filp, dev_priv->gart_heap);
		radeon_mem_release(filp, dev_priv->fb_heap);
		radeon_surfaces_release(filp, dev_priv);
	}
}

void radeon_driver_lastclose(drm_device_t * dev)
{
	radeon_do_release(dev);
}

int radeon_driver_open(drm_device_t * dev, drm_file_t * filp_priv)
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	struct drm_radeon_driver_file_fields *radeon_priv;

	DRM_DEBUG("\n");
	radeon_priv =
	    (struct drm_radeon_driver_file_fields *)
	    drm_alloc(sizeof(*radeon_priv), DRM_MEM_FILES);

	if (!radeon_priv)
		return -ENOMEM;

	filp_priv->driver_priv = radeon_priv;

	if (dev_priv)
		radeon_priv->radeon_fb_delta = dev_priv->fb_location;
	else
		radeon_priv->radeon_fb_delta = 0;
	return 0;
}

void radeon_driver_postclose(drm_device_t * dev, drm_file_t * filp_priv)
{
	struct drm_radeon_driver_file_fields *radeon_priv =
	    filp_priv->driver_priv;

	drm_free(radeon_priv, sizeof(*radeon_priv), DRM_MEM_FILES);
}

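/* The three flags after each handler below are the permission bits the
 * DRM core checks before dispatch (roughly: needs authentication,
 * needs root, and a third bit whose exact meaning is defined by
 * drm_ioctl_desc_t in drmP.h; noted here only as a reading aid).
 */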
drm_ioctl_desc_t radeon_ioctls[] = {
	[DRM_IOCTL_NR(DRM_RADEON_CP_INIT)] = {radeon_cp_init, 1, 1, 1},
	[DRM_IOCTL_NR(DRM_RADEON_CP_START)] = {radeon_cp_start, 1, 1, 1},
	[DRM_IOCTL_NR(DRM_RADEON_CP_STOP)] = {radeon_cp_stop, 1, 1, 1},
	[DRM_IOCTL_NR(DRM_RADEON_CP_RESET)] = {radeon_cp_reset, 1, 1, 1},
	[DRM_IOCTL_NR(DRM_RADEON_CP_IDLE)] = {radeon_cp_idle, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_CP_RESUME)] = {radeon_cp_resume, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_RESET)] = {radeon_engine_reset, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_FULLSCREEN)] = {radeon_fullscreen, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_SWAP)] = {radeon_cp_swap, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_CLEAR)] = {radeon_cp_clear, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_VERTEX)] = {radeon_cp_vertex, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_INDICES)] = {radeon_cp_indices, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_TEXTURE)] = {radeon_cp_texture, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_STIPPLE)] = {radeon_cp_stipple, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_INDIRECT)] = {radeon_cp_indirect, 1, 1, 1},
	[DRM_IOCTL_NR(DRM_RADEON_VERTEX2)] = {radeon_cp_vertex2, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_CMDBUF)] = {radeon_cp_cmdbuf, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_GETPARAM)] = {radeon_cp_getparam, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_FLIP)] = {radeon_cp_flip, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_ALLOC)] = {radeon_mem_alloc, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_FREE)] = {radeon_mem_free, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_INIT_HEAP)] = {radeon_mem_init_heap, 1, 1, 1},
	[DRM_IOCTL_NR(DRM_RADEON_IRQ_EMIT)] = {radeon_irq_emit, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_IRQ_WAIT)] = {radeon_irq_wait, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_SETPARAM)] = {radeon_cp_setparam, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_SURF_ALLOC)] = {radeon_surface_alloc, 1, 0, 0},
	[DRM_IOCTL_NR(DRM_RADEON_SURF_FREE)] = {radeon_surface_free, 1, 0, 0}
};

int radeon_max_ioctl = DRM_ARRAY_SIZE(radeon_ioctls);