/* radeon_state.c -- State support for Radeon -*- linux-c -*-
 *
 * Copyright 2000 VA Linux Systems, Inc., Fremont, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Gareth Hughes <gareth@valinux.com>
 *    Kevin E. Martin <martin@valinux.com>
 */

#include "radeon.h"
#include "drmP.h"
#include "drm.h"
#include "radeon_drm.h"
#include "radeon_drv.h"


/* ================================================================
 * CP hardware state programming functions
 */

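/* All of the state emission below follows the same pattern: BEGIN_RING()
 * reserves ring space, CP_PACKET0( reg, n ) writes the n+1 following
 * OUT_RING() values into consecutive registers starting at reg, and
 * ADVANCE_RING() commits the packet to the CP ring.
 */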
static __inline__ void radeon_emit_clip_rect( drm_radeon_private_t *dev_priv,
                                              drm_clip_rect_t *box )
{
        RING_LOCALS;

        DRM_DEBUG( "   box:  x1=%d y1=%d  x2=%d y2=%d\n",
                   box->x1, box->y1, box->x2, box->y2 );

        BEGIN_RING( 4 );
        OUT_RING( CP_PACKET0( RADEON_RE_TOP_LEFT, 0 ) );
        OUT_RING( (box->y1 << 16) | box->x1 );
        OUT_RING( CP_PACKET0( RADEON_RE_WIDTH_HEIGHT, 0 ) );
        OUT_RING( ((box->y2 - 1) << 16) | (box->x2 - 1) );
        ADVANCE_RING();
}

/* Emit 1.1 state
 */
static void radeon_emit_state( drm_radeon_private_t *dev_priv,
                               drm_radeon_context_regs_t *ctx,
                               drm_radeon_texture_regs_t *tex,
                               unsigned int dirty )
{
        RING_LOCALS;
        DRM_DEBUG( "dirty=0x%08x\n", dirty );

        if ( dirty & RADEON_UPLOAD_CONTEXT ) {
                BEGIN_RING( 14 );
                OUT_RING( CP_PACKET0( RADEON_PP_MISC, 6 ) );
                OUT_RING( ctx->pp_misc );
                OUT_RING( ctx->pp_fog_color );
                OUT_RING( ctx->re_solid_color );
                OUT_RING( ctx->rb3d_blendcntl );
                OUT_RING( ctx->rb3d_depthoffset );
                OUT_RING( ctx->rb3d_depthpitch );
                OUT_RING( ctx->rb3d_zstencilcntl );
                OUT_RING( CP_PACKET0( RADEON_PP_CNTL, 2 ) );
                OUT_RING( ctx->pp_cntl );
                OUT_RING( ctx->rb3d_cntl );
                OUT_RING( ctx->rb3d_coloroffset );
                OUT_RING( CP_PACKET0( RADEON_RB3D_COLORPITCH, 0 ) );
                OUT_RING( ctx->rb3d_colorpitch );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_VERTFMT ) {
                BEGIN_RING( 2 );
                OUT_RING( CP_PACKET0( RADEON_SE_COORD_FMT, 0 ) );
                OUT_RING( ctx->se_coord_fmt );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_LINE ) {
                BEGIN_RING( 5 );
                OUT_RING( CP_PACKET0( RADEON_RE_LINE_PATTERN, 1 ) );
                OUT_RING( ctx->re_line_pattern );
                OUT_RING( ctx->re_line_state );
                OUT_RING( CP_PACKET0( RADEON_SE_LINE_WIDTH, 0 ) );
                OUT_RING( ctx->se_line_width );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_BUMPMAP ) {
                BEGIN_RING( 5 );
                OUT_RING( CP_PACKET0( RADEON_PP_LUM_MATRIX, 0 ) );
                OUT_RING( ctx->pp_lum_matrix );
                OUT_RING( CP_PACKET0( RADEON_PP_ROT_MATRIX_0, 1 ) );
                OUT_RING( ctx->pp_rot_matrix_0 );
                OUT_RING( ctx->pp_rot_matrix_1 );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_MASKS ) {
                BEGIN_RING( 4 );
                OUT_RING( CP_PACKET0( RADEON_RB3D_STENCILREFMASK, 2 ) );
                OUT_RING( ctx->rb3d_stencilrefmask );
                OUT_RING( ctx->rb3d_ropcntl );
                OUT_RING( ctx->rb3d_planemask );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_VIEWPORT ) {
                BEGIN_RING( 7 );
                OUT_RING( CP_PACKET0( RADEON_SE_VPORT_XSCALE, 5 ) );
                OUT_RING( ctx->se_vport_xscale );
                OUT_RING( ctx->se_vport_xoffset );
                OUT_RING( ctx->se_vport_yscale );
                OUT_RING( ctx->se_vport_yoffset );
                OUT_RING( ctx->se_vport_zscale );
                OUT_RING( ctx->se_vport_zoffset );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_SETUP ) {
                BEGIN_RING( 4 );
                OUT_RING( CP_PACKET0( RADEON_SE_CNTL, 0 ) );
                OUT_RING( ctx->se_cntl );
                OUT_RING( CP_PACKET0( RADEON_SE_CNTL_STATUS, 0 ) );
                OUT_RING( ctx->se_cntl_status );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_MISC ) {
                BEGIN_RING( 2 );
                OUT_RING( CP_PACKET0( RADEON_RE_MISC, 0 ) );
                OUT_RING( ctx->re_misc );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_TEX0 ) {
                BEGIN_RING( 9 );
                OUT_RING( CP_PACKET0( RADEON_PP_TXFILTER_0, 5 ) );
                OUT_RING( tex[0].pp_txfilter );
                OUT_RING( tex[0].pp_txformat );
                OUT_RING( tex[0].pp_txoffset );
                OUT_RING( tex[0].pp_txcblend );
                OUT_RING( tex[0].pp_txablend );
                OUT_RING( tex[0].pp_tfactor );
                OUT_RING( CP_PACKET0( RADEON_PP_BORDER_COLOR_0, 0 ) );
                OUT_RING( tex[0].pp_border_color );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_TEX1 ) {
                BEGIN_RING( 9 );
                OUT_RING( CP_PACKET0( RADEON_PP_TXFILTER_1, 5 ) );
                OUT_RING( tex[1].pp_txfilter );
                OUT_RING( tex[1].pp_txformat );
                OUT_RING( tex[1].pp_txoffset );
                OUT_RING( tex[1].pp_txcblend );
                OUT_RING( tex[1].pp_txablend );
                OUT_RING( tex[1].pp_tfactor );
                OUT_RING( CP_PACKET0( RADEON_PP_BORDER_COLOR_1, 0 ) );
                OUT_RING( tex[1].pp_border_color );
                ADVANCE_RING();
        }

        if ( dirty & RADEON_UPLOAD_TEX2 ) {
                BEGIN_RING( 9 );
                OUT_RING( CP_PACKET0( RADEON_PP_TXFILTER_2, 5 ) );
                OUT_RING( tex[2].pp_txfilter );
                OUT_RING( tex[2].pp_txformat );
                OUT_RING( tex[2].pp_txoffset );
                OUT_RING( tex[2].pp_txcblend );
                OUT_RING( tex[2].pp_txablend );
                OUT_RING( tex[2].pp_tfactor );
                OUT_RING( CP_PACKET0( RADEON_PP_BORDER_COLOR_2, 0 ) );
                OUT_RING( tex[2].pp_border_color );
                ADVANCE_RING();
        }
}

/* Emit 1.2 state
 */
static void radeon_emit_state2( drm_radeon_private_t *dev_priv,
                                drm_radeon_state_t *state )
{
        RING_LOCALS;

        if (state->dirty & RADEON_UPLOAD_ZBIAS) {
                BEGIN_RING( 3 );
                OUT_RING( CP_PACKET0( RADEON_SE_ZBIAS_FACTOR, 1 ) );
                OUT_RING( state->context2.se_zbias_factor );
                OUT_RING( state->context2.se_zbias_constant );
                ADVANCE_RING();
        }

        radeon_emit_state( dev_priv, &state->context,
                           state->tex, state->dirty );
}

/* New (1.3) state mechanism.  3 commands (packet, scalar, vector) in
 * 1.3 cmdbuffers allow all previous state to be updated as well as
 * the tcl scalar and vector areas.
 */
static struct {
        int start;
        int len;
        const char *name;
} packet[RADEON_MAX_STATE_PACKETS] = {
        { RADEON_PP_MISC,7,"RADEON_PP_MISC" },
        { RADEON_PP_CNTL,3,"RADEON_PP_CNTL" },
        { RADEON_RB3D_COLORPITCH,1,"RADEON_RB3D_COLORPITCH" },
        { RADEON_RE_LINE_PATTERN,2,"RADEON_RE_LINE_PATTERN" },
        { RADEON_SE_LINE_WIDTH,1,"RADEON_SE_LINE_WIDTH" },
        { RADEON_PP_LUM_MATRIX,1,"RADEON_PP_LUM_MATRIX" },
        { RADEON_PP_ROT_MATRIX_0,2,"RADEON_PP_ROT_MATRIX_0" },
        { RADEON_RB3D_STENCILREFMASK,3,"RADEON_RB3D_STENCILREFMASK" },
        { RADEON_SE_VPORT_XSCALE,6,"RADEON_SE_VPORT_XSCALE" },
        { RADEON_SE_CNTL,2,"RADEON_SE_CNTL" },
        { RADEON_SE_CNTL_STATUS,1,"RADEON_SE_CNTL_STATUS" },
        { RADEON_RE_MISC,1,"RADEON_RE_MISC" },
        { RADEON_PP_TXFILTER_0,6,"RADEON_PP_TXFILTER_0" },
        { RADEON_PP_BORDER_COLOR_0,1,"RADEON_PP_BORDER_COLOR_0" },
        { RADEON_PP_TXFILTER_1,6,"RADEON_PP_TXFILTER_1" },
        { RADEON_PP_BORDER_COLOR_1,1,"RADEON_PP_BORDER_COLOR_1" },
        { RADEON_PP_TXFILTER_2,6,"RADEON_PP_TXFILTER_2" },
        { RADEON_PP_BORDER_COLOR_2,1,"RADEON_PP_BORDER_COLOR_2" },
        { RADEON_SE_ZBIAS_FACTOR,2,"RADEON_SE_ZBIAS_FACTOR" },
        { RADEON_SE_TCL_OUTPUT_VTX_FMT,11,"RADEON_SE_TCL_OUTPUT_VTX_FMT" },
        { RADEON_SE_TCL_MATERIAL_EMMISSIVE_RED,17,"RADEON_SE_TCL_MATERIAL_EMMISSIVE_RED" },
};
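
/* Each entry above names the first register of a state block and the number
 * of dwords written to it; a 1.3 command stream refers to a block by its
 * index into this table.  Range checking against RADEON_MAX_STATE_PACKETS
 * is assumed to happen in the cmdbuf parser, which lives outside this
 * portion of the file.
 */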


#if RADEON_PERFORMANCE_BOXES
/* ================================================================
 * Performance monitoring functions
 */

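/* Paint a small solid-colored box into the back buffer.  The packing below
 * follows the framebuffer format, e.g. (r,g,b) = (0,255,0) becomes 0x07e0
 * in RGB565 and 0xff00ff00 in ARGB8888.
 */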
static void radeon_clear_box( drm_radeon_private_t *dev_priv,
                              int x, int y, int w, int h,
                              int r, int g, int b )
{
        u32 pitch, offset;
        u32 color;
        RING_LOCALS;

        switch ( dev_priv->color_fmt ) {
        case RADEON_COLOR_FORMAT_RGB565:
                color = (((r & 0xf8) << 8) |
                         ((g & 0xfc) << 3) |
                         ((b & 0xf8) >> 3));
                break;
        case RADEON_COLOR_FORMAT_ARGB8888:
        default:
                color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
                break;
        }

        offset = dev_priv->back_offset;
        pitch = dev_priv->back_pitch >> 3;

        BEGIN_RING( 6 );

        OUT_RING( CP_PACKET3( RADEON_CNTL_PAINT_MULTI, 4 ) );
        OUT_RING( RADEON_GMC_DST_PITCH_OFFSET_CNTL |
                  RADEON_GMC_BRUSH_SOLID_COLOR |
                  (dev_priv->color_fmt << 8) |
                  RADEON_GMC_SRC_DATATYPE_COLOR |
                  RADEON_ROP3_P |
                  RADEON_GMC_CLR_CMP_CNTL_DIS );

        OUT_RING( (pitch << 22) | (offset >> 5) );
        OUT_RING( color );

        OUT_RING( (x << 16) | y );
        OUT_RING( (w << 16) | h );

        ADVANCE_RING();
}

static void radeon_cp_performance_boxes( drm_radeon_private_t *dev_priv )
{
        if ( atomic_read( &dev_priv->idle_count ) == 0 ) {
                radeon_clear_box( dev_priv, 64, 4, 8, 8, 0, 255, 0 );
        } else {
                atomic_set( &dev_priv->idle_count, 0 );
        }
}

#endif


/* ================================================================
 * CP command dispatch functions
 */

static void radeon_cp_dispatch_clear( drm_device_t *dev,
                                      drm_radeon_clear_t *clear,
                                      drm_radeon_clear_rect_t *depth_boxes )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_radeon_depth_clear_t *depth_clear = &dev_priv->depth_clear;
        int nbox = sarea_priv->nbox;
        drm_clip_rect_t *pbox = sarea_priv->boxes;
        unsigned int flags = clear->flags;
        u32 rb3d_cntl = 0, rb3d_stencilrefmask = 0;
        int i;
        RING_LOCALS;
        DRM_DEBUG( "flags = 0x%x\n", flags );

        if ( dev_priv->page_flipping && dev_priv->current_page == 1 ) {
                unsigned int tmp = flags;

                flags &= ~(RADEON_FRONT | RADEON_BACK);
                if ( tmp & RADEON_FRONT ) flags |= RADEON_BACK;
                if ( tmp & RADEON_BACK )  flags |= RADEON_FRONT;
        }

        /* We have to clear the depth and/or stencil buffers by
         * rendering a quad into just those buffers.  Thus, we have to
         * make sure the 3D engine is configured correctly.
         */
        if ( flags & (RADEON_DEPTH | RADEON_STENCIL) ) {
                rb3d_cntl = depth_clear->rb3d_cntl;

                if ( flags & RADEON_DEPTH ) {
                        rb3d_cntl |= RADEON_Z_ENABLE;
                } else {
                        rb3d_cntl &= ~RADEON_Z_ENABLE;
                }

                if ( flags & RADEON_STENCIL ) {
                        rb3d_cntl |= RADEON_STENCIL_ENABLE;
                        rb3d_stencilrefmask = clear->depth_mask; /* misnamed field */
                } else {
                        rb3d_cntl &= ~RADEON_STENCIL_ENABLE;
                        rb3d_stencilrefmask = 0x00000000;
                }
        }

        for ( i = 0 ; i < nbox ; i++ ) {
                int x = pbox[i].x1;
                int y = pbox[i].y1;
                int w = pbox[i].x2 - x;
                int h = pbox[i].y2 - y;

                DRM_DEBUG( "dispatch clear %d,%d-%d,%d flags 0x%x\n",
                           x, y, w, h, flags );

                if ( flags & (RADEON_FRONT | RADEON_BACK) ) {
                        BEGIN_RING( 4 );

                        /* Ensure the 3D stream is idle before doing a
                         * 2D fill to clear the front or back buffer.
                         */
                        RADEON_WAIT_UNTIL_3D_IDLE();

                        OUT_RING( CP_PACKET0( RADEON_DP_WRITE_MASK, 0 ) );
                        OUT_RING( clear->color_mask );

                        ADVANCE_RING();

                        /* Make sure we restore the 3D state next time.
                         */
                        dev_priv->sarea_priv->ctx_owner = 0;
                }

                if ( flags & RADEON_FRONT ) {
                        BEGIN_RING( 6 );

                        OUT_RING( CP_PACKET3( RADEON_CNTL_PAINT_MULTI, 4 ) );
                        OUT_RING( RADEON_GMC_DST_PITCH_OFFSET_CNTL |
                                  RADEON_GMC_BRUSH_SOLID_COLOR |
                                  (dev_priv->color_fmt << 8) |
                                  RADEON_GMC_SRC_DATATYPE_COLOR |
                                  RADEON_ROP3_P |
                                  RADEON_GMC_CLR_CMP_CNTL_DIS );

                        OUT_RING( dev_priv->front_pitch_offset );
                        OUT_RING( clear->clear_color );

                        OUT_RING( (x << 16) | y );
                        OUT_RING( (w << 16) | h );

                        ADVANCE_RING();
                }

                if ( flags & RADEON_BACK ) {
                        BEGIN_RING( 6 );

                        OUT_RING( CP_PACKET3( RADEON_CNTL_PAINT_MULTI, 4 ) );
                        OUT_RING( RADEON_GMC_DST_PITCH_OFFSET_CNTL |
                                  RADEON_GMC_BRUSH_SOLID_COLOR |
                                  (dev_priv->color_fmt << 8) |
                                  RADEON_GMC_SRC_DATATYPE_COLOR |
                                  RADEON_ROP3_P |
                                  RADEON_GMC_CLR_CMP_CNTL_DIS );

                        OUT_RING( dev_priv->back_pitch_offset );
                        OUT_RING( clear->clear_color );

                        OUT_RING( (x << 16) | y );
                        OUT_RING( (w << 16) | h );

                        ADVANCE_RING();
                }

                if ( flags & (RADEON_DEPTH | RADEON_STENCIL) ) {

                        radeon_emit_clip_rect( dev_priv,
                                               &sarea_priv->boxes[i] );

                        BEGIN_RING( 28 );

                        RADEON_WAIT_UNTIL_2D_IDLE();

                        OUT_RING( CP_PACKET0( RADEON_PP_CNTL, 1 ) );
                        OUT_RING( 0x00000000 );
                        OUT_RING( rb3d_cntl );

                        OUT_RING_REG( RADEON_RB3D_ZSTENCILCNTL,
                                      depth_clear->rb3d_zstencilcntl );
                        OUT_RING_REG( RADEON_RB3D_STENCILREFMASK,
                                      rb3d_stencilrefmask );
                        OUT_RING_REG( RADEON_RB3D_PLANEMASK,
                                      0x00000000 );
                        OUT_RING_REG( RADEON_SE_CNTL,
                                      depth_clear->se_cntl );

                        /* Radeon 7500 doesn't like vertices without
                         * color.
                         */
                        OUT_RING( CP_PACKET3( RADEON_3D_DRAW_IMMD, 13 ) );
                        OUT_RING( RADEON_VTX_Z_PRESENT |
                                  RADEON_VTX_PKCOLOR_PRESENT );
                        OUT_RING( (RADEON_PRIM_TYPE_RECT_LIST |
                                   RADEON_PRIM_WALK_RING |
                                   RADEON_MAOS_ENABLE |
                                   RADEON_VTX_FMT_RADEON_MODE |
                                   (3 << RADEON_NUM_VERTICES_SHIFT)) );

                        OUT_RING( depth_boxes[i].ui[CLEAR_X1] );
                        OUT_RING( depth_boxes[i].ui[CLEAR_Y1] );
                        OUT_RING( depth_boxes[i].ui[CLEAR_DEPTH] );
                        OUT_RING( 0x0 );

                        OUT_RING( depth_boxes[i].ui[CLEAR_X1] );
                        OUT_RING( depth_boxes[i].ui[CLEAR_Y2] );
                        OUT_RING( depth_boxes[i].ui[CLEAR_DEPTH] );
                        OUT_RING( 0x0 );

                        OUT_RING( depth_boxes[i].ui[CLEAR_X2] );
                        OUT_RING( depth_boxes[i].ui[CLEAR_Y2] );
                        OUT_RING( depth_boxes[i].ui[CLEAR_DEPTH] );
                        OUT_RING( 0x0 );

                        ADVANCE_RING();

                        /* Make sure we restore the 3D state next time.
                         */
                        dev_priv->sarea_priv->ctx_owner = 0;
                }
        }

        /* Increment the clear counter.  The client-side 3D driver must
         * wait on this value before performing the clear ioctl.  We
         * need this because the card's so damned fast...
         */
        dev_priv->sarea_priv->last_clear++;

        BEGIN_RING( 4 );

        RADEON_CLEAR_AGE( dev_priv->sarea_priv->last_clear );
        RADEON_WAIT_UNTIL_IDLE();

        ADVANCE_RING();
}

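/* Copy the back buffer to the front buffer for each pending cliprect, then
 * bump the frame counter so clients can throttle on it.
 */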
static void radeon_cp_dispatch_swap( drm_device_t *dev )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
        int nbox = sarea_priv->nbox;
        drm_clip_rect_t *pbox = sarea_priv->boxes;
        int i;
        RING_LOCALS;
        DRM_DEBUG( "\n" );

#if RADEON_PERFORMANCE_BOXES
        /* Do some trivial performance monitoring...
         */
        radeon_cp_performance_boxes( dev_priv );
#endif

        /* Wait for the 3D stream to idle before dispatching the bitblt.
         * This will prevent data corruption between the two streams.
         */
        BEGIN_RING( 2 );

        RADEON_WAIT_UNTIL_3D_IDLE();

        ADVANCE_RING();

        for ( i = 0 ; i < nbox ; i++ ) {
                int x = pbox[i].x1;
                int y = pbox[i].y1;
                int w = pbox[i].x2 - x;
                int h = pbox[i].y2 - y;

                DRM_DEBUG( "dispatch swap %d,%d-%d,%d\n",
                           x, y, w, h );

                BEGIN_RING( 7 );

                OUT_RING( CP_PACKET3( RADEON_CNTL_BITBLT_MULTI, 5 ) );
                OUT_RING( RADEON_GMC_SRC_PITCH_OFFSET_CNTL |
                          RADEON_GMC_DST_PITCH_OFFSET_CNTL |
                          RADEON_GMC_BRUSH_NONE |
                          (dev_priv->color_fmt << 8) |
                          RADEON_GMC_SRC_DATATYPE_COLOR |
                          RADEON_ROP3_S |
                          RADEON_DP_SRC_SOURCE_MEMORY |
                          RADEON_GMC_CLR_CMP_CNTL_DIS |
                          RADEON_GMC_WR_MSK_DIS );

                /* Make this work even if front & back are flipped:
                 */
                if (dev_priv->current_page == 0) {
                        OUT_RING( dev_priv->back_pitch_offset );
                        OUT_RING( dev_priv->front_pitch_offset );
                }
                else {
                        OUT_RING( dev_priv->front_pitch_offset );
                        OUT_RING( dev_priv->back_pitch_offset );
                }

                OUT_RING( (x << 16) | y );
                OUT_RING( (x << 16) | y );
                OUT_RING( (w << 16) | h );

                ADVANCE_RING();
        }

        /* Increment the frame counter.  The client-side 3D driver must
         * throttle the framerate by waiting for this value before
         * performing the swapbuffer ioctl.
         */
        dev_priv->sarea_priv->last_frame++;

        BEGIN_RING( 4 );

        RADEON_FRAME_AGE( dev_priv->sarea_priv->last_frame );
        RADEON_WAIT_UNTIL_2D_IDLE();

        ADVANCE_RING();
}

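/* Page flip: point the CRTC at the other buffer instead of blitting.  The
 * sarea's pfCurrentPage field is kept in sync so clients can tell which
 * buffer is currently being scanned out.
 */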
static void radeon_cp_dispatch_flip( drm_device_t *dev )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        RING_LOCALS;
        DRM_DEBUG( "page=%d\n", dev_priv->current_page );

#if RADEON_PERFORMANCE_BOXES
        /* Do some trivial performance monitoring...
         */
        radeon_cp_performance_boxes( dev_priv );
#endif

        BEGIN_RING( 4 );

        RADEON_WAIT_UNTIL_3D_IDLE();
/*
        RADEON_WAIT_UNTIL_PAGE_FLIPPED();
*/
        OUT_RING( CP_PACKET0( RADEON_CRTC_OFFSET, 0 ) );

        if ( dev_priv->current_page == 0 ) {
                OUT_RING( dev_priv->back_offset );
                dev_priv->current_page = 1;
        } else {
                OUT_RING( dev_priv->front_offset );
                dev_priv->current_page = 0;
        }

        ADVANCE_RING();

        /* Increment the frame counter.  The client-side 3D driver must
         * throttle the framerate by waiting for this value before
         * performing the swapbuffer ioctl.
         */
        dev_priv->sarea_priv->last_frame++;
        dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;

        BEGIN_RING( 2 );

        RADEON_FRAME_AGE( dev_priv->sarea_priv->last_frame );

        ADVANCE_RING();
}

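/* Sanity check the vertex count against the primitive type.  Returns
 * nonzero for an invalid combination, e.g. a TRI_LIST with 5 vertices
 * (not a multiple of 3) or a LINE_STRIP with fewer than 2 vertices.
 */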
static int bad_prim_vertex_nr( int primitive, int nr )
{
        switch (primitive & RADEON_PRIM_TYPE_MASK) {
        case RADEON_PRIM_TYPE_NONE:
        case RADEON_PRIM_TYPE_POINT:
                return nr < 1;
        case RADEON_PRIM_TYPE_LINE:
                return (nr & 1) || nr == 0;
        case RADEON_PRIM_TYPE_LINE_STRIP:
                return nr < 2;
        case RADEON_PRIM_TYPE_TRI_LIST:
        case RADEON_PRIM_TYPE_3VRT_POINT_LIST:
        case RADEON_PRIM_TYPE_3VRT_LINE_LIST:
        case RADEON_PRIM_TYPE_RECT_LIST:
                return nr % 3 || nr == 0;
        case RADEON_PRIM_TYPE_TRI_FAN:
        case RADEON_PRIM_TYPE_TRI_STRIP:
                return nr < 3;
        default:
                return 1;
        }
}


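/* Primitive description handed to the vertex/index dispatch functions.
 * start and finish are byte offsets into the DMA buffer, offset is the
 * offset of the vertex data from the start of the DMA buffers, and
 * vc_format is the hardware vertex format word.
 */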
typedef struct {
        unsigned int start;
        unsigned int finish;
        unsigned int prim;
        unsigned int numverts;
        unsigned int offset;
        unsigned int vc_format;
} drm_radeon_tcl_prim_t;

static void radeon_cp_dispatch_vertex( drm_device_t *dev,
                                       drm_buf_t *buf,
                                       drm_radeon_tcl_prim_t *prim,
                                       drm_clip_rect_t *boxes,
                                       int nbox )

{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        drm_clip_rect_t box;
        int offset = dev_priv->agp_buffers_offset + buf->offset + prim->start;
        int numverts = (int)prim->numverts;
        int i = 0;
        RING_LOCALS;

        DRM_DEBUG("hwprim 0x%x vfmt 0x%x %d..%d %d verts\n",
                  prim->prim,
                  prim->vc_format,
                  prim->start,
                  prim->finish,
                  prim->numverts);

        if (bad_prim_vertex_nr( prim->prim, prim->numverts )) {
                DRM_ERROR( "bad prim %x numverts %d\n",
                           prim->prim, prim->numverts );
                return;
        }

        do {
                /* Emit the next cliprect */
                if ( i < nbox ) {
                        if (DRM_COPY_FROM_USER_UNCHECKED( &box, &boxes[i], sizeof(box) ))
                                return;

                        radeon_emit_clip_rect( dev_priv, &box );
                }

                /* Emit the vertex buffer rendering commands */
                BEGIN_RING( 5 );

                OUT_RING( CP_PACKET3( RADEON_3D_RNDR_GEN_INDX_PRIM, 3 ) );
                OUT_RING( offset );
                OUT_RING( numverts );
                OUT_RING( prim->vc_format );
                OUT_RING( prim->prim | RADEON_PRIM_WALK_LIST |
                          RADEON_COLOR_ORDER_RGBA |
                          RADEON_VTX_FMT_RADEON_MODE |
                          (numverts << RADEON_NUM_VERTICES_SHIFT) );

                ADVANCE_RING();

                i++;
        } while ( i < nbox );
}


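/* Mark a DMA buffer as pending and stamp it with a new dispatch age.
 * The freelist can hand the buffer out again once the CP has written
 * that age back; the age test itself lives elsewhere in the driver.
 */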
static void radeon_cp_discard_buffer( drm_device_t *dev, drm_buf_t *buf )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        drm_radeon_buf_priv_t *buf_priv = buf->dev_private;
        RING_LOCALS;

        buf_priv->age = ++dev_priv->sarea_priv->last_dispatch;

        /* Emit the vertex buffer age */
        BEGIN_RING( 2 );
        RADEON_DISPATCH_AGE( buf_priv->age );
        ADVANCE_RING();

        buf->pending = 1;
        buf->used = 0;
}

static void radeon_cp_dispatch_indirect( drm_device_t *dev,
                                         drm_buf_t *buf,
                                         int start, int end )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        RING_LOCALS;
        DRM_DEBUG( "indirect: buf=%d s=0x%x e=0x%x\n",
                   buf->idx, start, end );

        if ( start != end ) {
                int offset = (dev_priv->agp_buffers_offset
                              + buf->offset + start);
                int dwords = (end - start + 3) / sizeof(u32);

                /* Indirect buffer data must be an even number of
                 * dwords, so if we've been given an odd number we must
                 * pad the data with a Type-2 CP packet.
                 */
                if ( dwords & 1 ) {
                        u32 *data = (u32 *)
                                ((char *)dev_priv->buffers->handle
                                 + buf->offset + start);
                        data[dwords++] = RADEON_CP_PACKET2;
                }

                /* Fire off the indirect buffer */
                BEGIN_RING( 3 );

                OUT_RING( CP_PACKET0( RADEON_CP_IB_BASE, 1 ) );
                OUT_RING( offset );
                OUT_RING( dwords );

                ADVANCE_RING();
        }
}


static void radeon_cp_dispatch_indices( drm_device_t *dev,
                                        drm_buf_t *elt_buf,
                                        drm_radeon_tcl_prim_t *prim,
                                        drm_clip_rect_t *boxes,
                                        int nbox )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        drm_clip_rect_t box;
        int offset = dev_priv->agp_buffers_offset + prim->offset;
        u32 *data;
        int dwords;
        int i = 0;
        int start = prim->start + RADEON_INDEX_PRIM_OFFSET;
        int count = (prim->finish - start) / sizeof(u16);

        DRM_DEBUG("hwprim 0x%x vfmt 0x%x %d..%d offset: %x nr %d\n",
                  prim->prim,
                  prim->vc_format,
                  prim->start,
                  prim->finish,
                  prim->offset,
                  prim->numverts);

        if (bad_prim_vertex_nr( prim->prim, count )) {
                DRM_ERROR( "bad prim %x count %d\n",
                           prim->prim, count );
                return;
        }


        if ( start >= prim->finish ||
             (prim->start & 0x7) ) {
                DRM_ERROR( "buffer prim %d\n", prim->prim );
                return;
        }

        dwords = (prim->finish - prim->start + 3) / sizeof(u32);

        data = (u32 *)((char *)dev_priv->buffers->handle +
                       elt_buf->offset + prim->start);

        data[0] = CP_PACKET3( RADEON_3D_RNDR_GEN_INDX_PRIM, dwords-2 );
        data[1] = offset;
        data[2] = prim->numverts;
        data[3] = prim->vc_format;
        data[4] = (prim->prim |
                   RADEON_PRIM_WALK_IND |
                   RADEON_COLOR_ORDER_RGBA |
                   RADEON_VTX_FMT_RADEON_MODE |
                   (count << RADEON_NUM_VERTICES_SHIFT) );

        do {
                if ( i < nbox ) {
                        if (DRM_COPY_FROM_USER_UNCHECKED( &box, &boxes[i], sizeof(box) ))
                                return;

                        radeon_emit_clip_rect( dev_priv, &box );
                }

                radeon_cp_dispatch_indirect( dev, elt_buf,
                                             prim->start,
                                             prim->finish );

                i++;
        } while ( i < nbox );

}

#define RADEON_MAX_TEXTURE_SIZE (RADEON_BUFFER_SIZE - 8 * sizeof(u32))

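/* Upload a texture image through an indirect buffer.  If the image is
 * larger than RADEON_MAX_TEXTURE_SIZE, only the first portion is blitted
 * and the function returns EAGAIN with image->y, height and data advanced,
 * so the caller is expected to re-submit the remainder.
 */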
static int radeon_cp_dispatch_texture( drm_device_t *dev,
                                       drm_radeon_texture_t *tex,
                                       drm_radeon_tex_image_t *image )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        drm_buf_t *buf;
        u32 format;
        u32 *buffer;
        const u8 *data;
        int size, dwords, tex_width, blit_width;
        u32 y, height;
        int ret = 0, i;
        RING_LOCALS;

        /* FIXME: Be smarter about this...
         */
        buf = radeon_freelist_get( dev );
        if ( !buf ) return DRM_ERR(EAGAIN);

        DRM_DEBUG( "tex: ofs=0x%x p=%d f=%d x=%hd y=%hd w=%hd h=%hd\n",
                   tex->offset >> 10, tex->pitch, tex->format,
                   image->x, image->y, image->width, image->height );

        /* The compiler won't optimize away a division by a variable,
         * even if the only legal values are powers of two.  Thus, we'll
         * use a shift instead.
         */
        switch ( tex->format ) {
        case RADEON_TXFORMAT_ARGB8888:
        case RADEON_TXFORMAT_RGBA8888:
                format = RADEON_COLOR_FORMAT_ARGB8888;
                tex_width = tex->width * 4;
                blit_width = image->width * 4;
                break;
        case RADEON_TXFORMAT_AI88:
        case RADEON_TXFORMAT_ARGB1555:
        case RADEON_TXFORMAT_RGB565:
        case RADEON_TXFORMAT_ARGB4444:
                format = RADEON_COLOR_FORMAT_RGB565;
                tex_width = tex->width * 2;
                blit_width = image->width * 2;
                break;
        case RADEON_TXFORMAT_I8:
        case RADEON_TXFORMAT_RGB332:
                format = RADEON_COLOR_FORMAT_CI8;
                tex_width = tex->width * 1;
                blit_width = image->width * 1;
                break;
        default:
                DRM_ERROR( "invalid texture format %d\n", tex->format );
                return DRM_ERR(EINVAL);
        }

        DRM_DEBUG( "   tex=%dx%d  blit=%d\n",
                   tex_width, tex->height, blit_width );

        /* Flush the pixel cache.  This ensures no pixel data gets mixed
         * up with the texture data from the host data blit, otherwise
         * part of the texture image may be corrupted.
         */
        BEGIN_RING( 4 );

        RADEON_FLUSH_CACHE();
        RADEON_WAIT_UNTIL_IDLE();

        ADVANCE_RING();

#ifdef __BIG_ENDIAN
        /* The Mesa texture functions provide the data in little endian as the
         * chip wants it, but we need to compensate for the fact that the CP
         * ring gets byte-swapped
         */
        BEGIN_RING( 2 );
        OUT_RING_REG( RADEON_RBBM_GUICNTL, RADEON_HOST_DATA_SWAP_32BIT );
        ADVANCE_RING();
#endif

        /* Make a copy of the parameters in case we have to update them
         * for a multi-pass texture blit.
         */
        y = image->y;
        height = image->height;
        data = (const u8 *)image->data;

        size = height * blit_width;

        if ( size > RADEON_MAX_TEXTURE_SIZE ) {
                /* Texture image is too large, do a multipass upload */
                ret = DRM_ERR(EAGAIN);

                /* Adjust the blit size to fit the indirect buffer */
                height = RADEON_MAX_TEXTURE_SIZE / blit_width;
                size = height * blit_width;

                /* Update the input parameters for next time */
                image->y += height;
                image->height -= height;
                image->data = (const char *)image->data + size;

                if ( DRM_COPY_TO_USER( tex->image, image, sizeof(*image) ) ) {
                        DRM_ERROR( "EFAULT on tex->image\n" );
                        return DRM_ERR(EFAULT);
                }
        } else if ( size < 4 && size > 0 ) {
                size = 4;
        }

        dwords = size / 4;

        /* Dispatch the indirect buffer.
         */
        buffer = (u32 *)((char *)dev_priv->buffers->handle + buf->offset);

        buffer[0] = CP_PACKET3( RADEON_CNTL_HOSTDATA_BLT, dwords + 6 );
        buffer[1] = (RADEON_GMC_DST_PITCH_OFFSET_CNTL |
                     RADEON_GMC_BRUSH_NONE |
                     (format << 8) |
                     RADEON_GMC_SRC_DATATYPE_COLOR |
                     RADEON_ROP3_S |
                     RADEON_DP_SRC_SOURCE_HOST_DATA |
                     RADEON_GMC_CLR_CMP_CNTL_DIS |
                     RADEON_GMC_WR_MSK_DIS);

        buffer[2] = (tex->pitch << 22) | (tex->offset >> 10);
        buffer[3] = 0xffffffff;
        buffer[4] = 0xffffffff;
        buffer[5] = (y << 16) | image->x;
        buffer[6] = (height << 16) | image->width;
        buffer[7] = dwords;

        buffer += 8;

        if ( tex_width >= 32 ) {
                /* Texture image width is larger than the minimum, so we
                 * can upload it directly.
                 */
                if ( DRM_COPY_FROM_USER( buffer, data, dwords * sizeof(u32) ) ) {
                        DRM_ERROR( "EFAULT on data, %d dwords\n", dwords );
                        return DRM_ERR(EFAULT);
                }
        } else {
                /* Texture image width is less than the minimum, so we
                 * need to pad out each image scanline to the minimum
                 * width.
                 */
                for ( i = 0 ; i < tex->height ; i++ ) {
                        if ( DRM_COPY_FROM_USER( buffer, data, tex_width ) ) {
                                DRM_ERROR( "EFAULT on pad, %d bytes\n",
                                           tex_width );
                                return DRM_ERR(EFAULT);
                        }
                        buffer += 8;
                        data += tex_width;
                }
        }

        buf->pid = DRM_CURRENTPID;
        buf->used = (dwords + 8) * sizeof(u32);

        radeon_cp_dispatch_indirect( dev, buf, 0, buf->used );
        radeon_cp_discard_buffer( dev, buf );

        /* Flush the pixel cache after the blit completes.  This ensures
         * the texture data is written out to memory before rendering
         * continues.
         */
        BEGIN_RING( 4 );

        RADEON_FLUSH_CACHE();
        RADEON_WAIT_UNTIL_2D_IDLE();

        ADVANCE_RING();

        return ret;
}

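/* Upload the 32x32 polygon stipple pattern: reset RE_STIPPLE_ADDR and
 * stream the 32 pattern dwords through RE_STIPPLE_DATA.
 */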
static void radeon_cp_dispatch_stipple( drm_device_t *dev, u32 *stipple )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        int i;
        RING_LOCALS;
        DRM_DEBUG( "\n" );

        BEGIN_RING( 35 );

        OUT_RING( CP_PACKET0( RADEON_RE_STIPPLE_ADDR, 0 ) );
        OUT_RING( 0x00000000 );

        OUT_RING( CP_PACKET0_TABLE( RADEON_RE_STIPPLE_DATA, 31 ) );
        for ( i = 0 ; i < 32 ; i++ ) {
                OUT_RING( stipple[i] );
        }

        ADVANCE_RING();
}


/* ================================================================
 * IOCTL functions
 */

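/* The ioctls below share a common shape: verify the caller holds the
 * hardware lock (LOCK_TEST_WITH_RETURN), copy the argument struct in from
 * user space, check for ring space and buffer ages where needed, dispatch
 * the request, and finally COMMIT_RING() to kick the command processor.
 */
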
int radeon_cp_clear( DRM_IOCTL_ARGS )
{
        DRM_DEVICE;
        drm_radeon_private_t *dev_priv = dev->dev_private;
        drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
        drm_radeon_clear_t clear;
        drm_radeon_clear_rect_t depth_boxes[RADEON_NR_SAREA_CLIPRECTS];
        DRM_DEBUG( "\n" );

        LOCK_TEST_WITH_RETURN( dev );

        DRM_COPY_FROM_USER_IOCTL( clear, (drm_radeon_clear_t *)data,
                                  sizeof(clear) );

        RING_SPACE_TEST_WITH_RETURN( dev_priv );

        if ( sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS )
                sarea_priv->nbox = RADEON_NR_SAREA_CLIPRECTS;

        if ( DRM_COPY_FROM_USER( &depth_boxes, clear.depth_boxes,
                                 sarea_priv->nbox * sizeof(depth_boxes[0]) ) )
                return DRM_ERR(EFAULT);

        radeon_cp_dispatch_clear( dev, &clear, depth_boxes );

        COMMIT_RING();
        return 0;
}


/* Not sure why this isn't set all the time:
 */
static int radeon_do_init_pageflip( drm_device_t *dev )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        RING_LOCALS;

        DRM_DEBUG( "\n" );

        dev_priv->crtc_offset_cntl = RADEON_READ( RADEON_CRTC_OFFSET_CNTL );

        BEGIN_RING( 4 );
        RADEON_WAIT_UNTIL_3D_IDLE();
        OUT_RING( CP_PACKET0( RADEON_CRTC_OFFSET_CNTL, 0 ) );
        OUT_RING( dev_priv->crtc_offset_cntl | RADEON_CRTC_OFFSET_FLIP_CNTL );
        ADVANCE_RING();

        dev_priv->page_flipping = 1;
        dev_priv->current_page = 0;
        dev_priv->sarea_priv->pfCurrentPage = dev_priv->current_page;

        return 0;
}

int radeon_do_cleanup_pageflip( drm_device_t *dev )
{
        drm_radeon_private_t *dev_priv = dev->dev_private;
        DRM_DEBUG( "\n" );

        if (dev_priv->current_page != 0)
                radeon_cp_dispatch_flip( dev );

        /* FIXME: If the X server changes screen resolution, it
         * clobbers the value of RADEON_CRTC_OFFSET_CNTL, above,
         * leading to a flashing effect.
         */
Keith Whitwell2dcada32002-06-12 15:50:28 +00001104 dev_priv->page_flipping = 0;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001105 return 0;
1106}
1107
1108/* Swapping and flipping are different operations, need different ioctls.
1109 * They can & should be intermixed to support multiple 3d windows.
1110 */
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001111int radeon_cp_flip( DRM_IOCTL_ARGS )
Keith Whitwell2dcada32002-06-12 15:50:28 +00001112{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001113 DRM_DEVICE;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001114 drm_radeon_private_t *dev_priv = dev->dev_private;
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001115 DRM_DEBUG( "\n" );
Keith Whitwell2dcada32002-06-12 15:50:28 +00001116
1117 LOCK_TEST_WITH_RETURN( dev );
1118
1119 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1120
1121 if (!dev_priv->page_flipping)
1122 radeon_do_init_pageflip( dev );
1123
1124 radeon_cp_dispatch_flip( dev );
1125
1126 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001127 return 0;
1128}
1129
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001130int radeon_cp_swap( DRM_IOCTL_ARGS )
Kevin E Martin0994e632001-01-05 22:57:55 +00001131{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001132 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00001133 drm_radeon_private_t *dev_priv = dev->dev_private;
1134 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001135 DRM_DEBUG( "\n" );
Kevin E Martin0994e632001-01-05 22:57:55 +00001136
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001137 LOCK_TEST_WITH_RETURN( dev );
1138
1139 RING_SPACE_TEST_WITH_RETURN( dev_priv );
Gareth Hughes4d2a4452001-01-24 15:34:46 +00001140
Kevin E Martin0994e632001-01-05 22:57:55 +00001141 if ( sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS )
1142 sarea_priv->nbox = RADEON_NR_SAREA_CLIPRECTS;
1143
Keith Whitwell2dcada32002-06-12 15:50:28 +00001144 radeon_cp_dispatch_swap( dev );
1145 dev_priv->sarea_priv->ctx_owner = 0;
Kevin E Martin0994e632001-01-05 22:57:55 +00001146
Keith Whitwell2dcada32002-06-12 15:50:28 +00001147 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001148 return 0;
1149}
1150
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001151int radeon_cp_vertex( DRM_IOCTL_ARGS )
Kevin E Martin0994e632001-01-05 22:57:55 +00001152{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001153 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00001154 drm_radeon_private_t *dev_priv = dev->dev_private;
David Dawesab87c5d2002-02-14 02:00:26 +00001155 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Kevin E Martin0994e632001-01-05 22:57:55 +00001156 drm_device_dma_t *dma = dev->dma;
1157 drm_buf_t *buf;
Kevin E Martin0994e632001-01-05 22:57:55 +00001158 drm_radeon_vertex_t vertex;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001159 drm_radeon_tcl_prim_t prim;
Kevin E Martin0994e632001-01-05 22:57:55 +00001160
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001161 LOCK_TEST_WITH_RETURN( dev );
1162
1163 if ( !dev_priv ) {
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001164 DRM_ERROR( "%s called with no initialization\n", __func__ );
1165 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001166 }
1167
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001168 DRM_COPY_FROM_USER_IOCTL( vertex, (drm_radeon_vertex_t *)data,
1169 sizeof(vertex) );
Kevin E Martin0994e632001-01-05 22:57:55 +00001170
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001171 DRM_DEBUG( "pid=%d index=%d count=%d discard=%d\n",
1172 DRM_CURRENTPID,
Kevin E Martin0994e632001-01-05 22:57:55 +00001173 vertex.idx, vertex.count, vertex.discard );
1174
1175 if ( vertex.idx < 0 || vertex.idx >= dma->buf_count ) {
1176 DRM_ERROR( "buffer index %d (of %d max)\n",
1177 vertex.idx, dma->buf_count - 1 );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001178 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001179 }
1180 if ( vertex.prim < 0 ||
1181 vertex.prim > RADEON_PRIM_TYPE_3VRT_LINE_LIST ) {
1182 DRM_ERROR( "buffer prim %d\n", vertex.prim );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001183 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001184 }
1185
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001186 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1187 VB_AGE_TEST_WITH_RETURN( dev_priv );
Kevin E Martin0994e632001-01-05 22:57:55 +00001188
1189 buf = dma->buflist[vertex.idx];
Kevin E Martin0994e632001-01-05 22:57:55 +00001190
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001191 if ( buf->pid != DRM_CURRENTPID ) {
Kevin E Martin0994e632001-01-05 22:57:55 +00001192 DRM_ERROR( "process %d using buffer owned by %d\n",
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001193 DRM_CURRENTPID, buf->pid );
1194 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001195 }
1196 if ( buf->pending ) {
1197 DRM_ERROR( "sending pending buffer %d\n", vertex.idx );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001198 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001199 }
1200
Keith Whitwell2dcada32002-06-12 15:50:28 +00001201 /* Build up a prim_t record:
1202 */
Keith Whitwellbaef0862002-03-08 16:03:37 +00001203 if (vertex.count) {
Keith Whitwell2dcada32002-06-12 15:50:28 +00001204 buf->used = vertex.count; /* not used? */
1205
Keith Whitwellbaef0862002-03-08 16:03:37 +00001206 if ( sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS ) {
1207 radeon_emit_state( dev_priv,
1208 &sarea_priv->context_state,
1209 sarea_priv->tex_state,
1210 sarea_priv->dirty );
1211
1212 sarea_priv->dirty &= ~(RADEON_UPLOAD_TEX0IMAGES |
1213 RADEON_UPLOAD_TEX1IMAGES |
1214 RADEON_UPLOAD_TEX2IMAGES |
1215 RADEON_REQUIRE_QUIESCENCE);
1216 }
David Dawesab87c5d2002-02-14 02:00:26 +00001217
Keith Whitwellbaef0862002-03-08 16:03:37 +00001218 prim.start = 0;
1219 prim.finish = vertex.count; /* unused */
1220 prim.prim = vertex.prim;
Keith Whitwellbaef0862002-03-08 16:03:37 +00001221 prim.numverts = vertex.count;
1222 prim.vc_format = dev_priv->sarea_priv->vc_format;
1223
Keith Whitwell2dcada32002-06-12 15:50:28 +00001224 radeon_cp_dispatch_vertex( dev, buf, &prim,
1225 dev_priv->sarea_priv->boxes,
1226 dev_priv->sarea_priv->nbox );
David Dawesab87c5d2002-02-14 02:00:26 +00001227 }
1228
David Dawesab87c5d2002-02-14 02:00:26 +00001229 if (vertex.discard) {
Keith Whitwellbaef0862002-03-08 16:03:37 +00001230 radeon_cp_discard_buffer( dev, buf );
David Dawesab87c5d2002-02-14 02:00:26 +00001231 }
Kevin E Martin0994e632001-01-05 22:57:55 +00001232
Keith Whitwell2dcada32002-06-12 15:50:28 +00001233 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001234 return 0;
1235}
1236
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001237int radeon_cp_indices( DRM_IOCTL_ARGS )
Kevin E Martin0994e632001-01-05 22:57:55 +00001238{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001239 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00001240 drm_radeon_private_t *dev_priv = dev->dev_private;
David Dawesab87c5d2002-02-14 02:00:26 +00001241 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
Kevin E Martin0994e632001-01-05 22:57:55 +00001242 drm_device_dma_t *dma = dev->dma;
1243 drm_buf_t *buf;
Kevin E Martin0994e632001-01-05 22:57:55 +00001244 drm_radeon_indices_t elts;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001245 drm_radeon_tcl_prim_t prim;
Kevin E Martin0994e632001-01-05 22:57:55 +00001246 int count;
1247
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001248 LOCK_TEST_WITH_RETURN( dev );
1249
1250 if ( !dev_priv ) {
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001251 DRM_ERROR( "%s called with no initialization\n", __func__ );
1252 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001253 }
1254
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001255 DRM_COPY_FROM_USER_IOCTL( elts, (drm_radeon_indices_t *)data,
1256 sizeof(elts) );
Kevin E Martin0994e632001-01-05 22:57:55 +00001257
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001258 DRM_DEBUG( "pid=%d index=%d start=%d end=%d discard=%d\n",
1259 DRM_CURRENTPID,
Kevin E Martin0994e632001-01-05 22:57:55 +00001260 elts.idx, elts.start, elts.end, elts.discard );
1261
1262 if ( elts.idx < 0 || elts.idx >= dma->buf_count ) {
1263 DRM_ERROR( "buffer index %d (of %d max)\n",
1264 elts.idx, dma->buf_count - 1 );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001265 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001266 }
1267 if ( elts.prim < 0 ||
1268 elts.prim > RADEON_PRIM_TYPE_3VRT_LINE_LIST ) {
1269 DRM_ERROR( "buffer prim %d\n", elts.prim );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001270 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001271 }
1272
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001273 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1274 VB_AGE_TEST_WITH_RETURN( dev_priv );
Kevin E Martin0994e632001-01-05 22:57:55 +00001275
1276 buf = dma->buflist[elts.idx];
Kevin E Martin0994e632001-01-05 22:57:55 +00001277
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001278 if ( buf->pid != DRM_CURRENTPID ) {
Kevin E Martin0994e632001-01-05 22:57:55 +00001279 DRM_ERROR( "process %d using buffer owned by %d\n",
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001280 DRM_CURRENTPID, buf->pid );
1281 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001282 }
1283 if ( buf->pending ) {
1284 DRM_ERROR( "sending pending buffer %d\n", elts.idx );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001285 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001286 }
1287
1288 count = (elts.end - elts.start) / sizeof(u16);
1289 elts.start -= RADEON_INDEX_PRIM_OFFSET;
1290
1291 if ( elts.start & 0x7 ) {
1292 DRM_ERROR( "misaligned buffer 0x%x\n", elts.start );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001293 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001294 }
1295 if ( elts.start < buf->used ) {
1296 DRM_ERROR( "no header 0x%x - 0x%x\n", elts.start, buf->used );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001297 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001298 }
1299
1300 buf->used = elts.end;
Kevin E Martin0994e632001-01-05 22:57:55 +00001301
David Dawesab87c5d2002-02-14 02:00:26 +00001302 if ( sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS ) {
1303 radeon_emit_state( dev_priv,
1304 &sarea_priv->context_state,
1305 sarea_priv->tex_state,
1306 sarea_priv->dirty );
1307
1308 sarea_priv->dirty &= ~(RADEON_UPLOAD_TEX0IMAGES |
1309 RADEON_UPLOAD_TEX1IMAGES |
1310 RADEON_UPLOAD_TEX2IMAGES |
1311 RADEON_REQUIRE_QUIESCENCE);
1312 }
1313
1314
1315 /* Build up a prim_t record:
1316 */
1317 prim.start = elts.start;
Keith Whitwellbaef0862002-03-08 16:03:37 +00001318 prim.finish = elts.end;
David Dawesab87c5d2002-02-14 02:00:26 +00001319 prim.prim = elts.prim;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001320 prim.offset = 0; /* offset from start of dma buffers */
1321 prim.numverts = RADEON_MAX_VB_VERTS; /* duh */
David Dawesab87c5d2002-02-14 02:00:26 +00001322 prim.vc_format = dev_priv->sarea_priv->vc_format;
1323
Keith Whitwell2dcada32002-06-12 15:50:28 +00001324 radeon_cp_dispatch_indices( dev, buf, &prim,
1325 dev_priv->sarea_priv->boxes,
1326 dev_priv->sarea_priv->nbox );
David Dawesab87c5d2002-02-14 02:00:26 +00001327 if (elts.discard) {
Keith Whitwell2dcada32002-06-12 15:50:28 +00001328 radeon_cp_discard_buffer( dev, buf );
David Dawesab87c5d2002-02-14 02:00:26 +00001329 }
Kevin E Martin0994e632001-01-05 22:57:55 +00001330
Keith Whitwell2dcada32002-06-12 15:50:28 +00001331 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001332 return 0;
1333}
1334
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001335int radeon_cp_texture( DRM_IOCTL_ARGS )
Kevin E Martin0994e632001-01-05 22:57:55 +00001336{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001337 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00001338 drm_radeon_private_t *dev_priv = dev->dev_private;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001339 drm_radeon_texture_t tex;
1340 drm_radeon_tex_image_t image;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001341 int ret;
Kevin E Martin0994e632001-01-05 22:57:55 +00001342
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001343 LOCK_TEST_WITH_RETURN( dev );
Kevin E Martin0994e632001-01-05 22:57:55 +00001344
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001345 DRM_COPY_FROM_USER_IOCTL( tex, (drm_radeon_texture_t *)data, sizeof(tex) );
Gareth Hughes3a74d3a2001-03-06 04:37:37 +00001346
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001347 if ( tex.image == NULL ) {
1348 DRM_ERROR( "null texture image!\n" );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001349 return DRM_ERR(EINVAL);
David Dawes0e5b8d72001-03-19 17:45:52 +00001350 }
1351
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001352 if ( DRM_COPY_FROM_USER( &image,
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001353 (drm_radeon_tex_image_t *)tex.image,
1354 sizeof(image) ) )
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001355 return DRM_ERR(EFAULT);
David Dawes0e5b8d72001-03-19 17:45:52 +00001356
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001357 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1358 VB_AGE_TEST_WITH_RETURN( dev_priv );
1359
Keith Whitwell2dcada32002-06-12 15:50:28 +00001360 ret = radeon_cp_dispatch_texture( dev, &tex, &image );
1361
1362 COMMIT_RING();
1363 return ret;
Kevin E Martin0994e632001-01-05 22:57:55 +00001364}
1365
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001366int radeon_cp_stipple( DRM_IOCTL_ARGS )
Kevin E Martin0994e632001-01-05 22:57:55 +00001367{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001368 DRM_DEVICE;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001369 drm_radeon_private_t *dev_priv = dev->dev_private;
Kevin E Martin0994e632001-01-05 22:57:55 +00001370 drm_radeon_stipple_t stipple;
1371 u32 mask[32];
1372
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001373 LOCK_TEST_WITH_RETURN( dev );
Kevin E Martin0994e632001-01-05 22:57:55 +00001374
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001375 DRM_COPY_FROM_USER_IOCTL( stipple, (drm_radeon_stipple_t *)data,
1376 sizeof(stipple) );
Kevin E Martin0994e632001-01-05 22:57:55 +00001377
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001378 if ( DRM_COPY_FROM_USER( &mask, stipple.mask, 32 * sizeof(u32) ) )
1379 return DRM_ERR(EFAULT);
Kevin E Martin0994e632001-01-05 22:57:55 +00001380
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001381 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1382
Kevin E Martin0994e632001-01-05 22:57:55 +00001383 radeon_cp_dispatch_stipple( dev, mask );
1384
Keith Whitwell2dcada32002-06-12 15:50:28 +00001385 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001386 return 0;
1387}
1388
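/* Queue a previously filled indirect buffer for execution by the CP.
 * The buffer contents are not verified, so this path is restricted to
 * privileged clients (the X server).
 */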
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001389int radeon_cp_indirect( DRM_IOCTL_ARGS )
Kevin E Martin0994e632001-01-05 22:57:55 +00001390{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001391 DRM_DEVICE;
Kevin E Martin0994e632001-01-05 22:57:55 +00001392 drm_radeon_private_t *dev_priv = dev->dev_private;
1393 drm_device_dma_t *dma = dev->dma;
1394 drm_buf_t *buf;
Kevin E Martin0994e632001-01-05 22:57:55 +00001395 drm_radeon_indirect_t indirect;
1396 RING_LOCALS;
1397
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001398 LOCK_TEST_WITH_RETURN( dev );
1399
1400 if ( !dev_priv ) {
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001401 DRM_ERROR( "%s called with no initialization\n", __func__ );
1402 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001403 }
1404
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001405 DRM_COPY_FROM_USER_IOCTL( indirect, (drm_radeon_indirect_t *)data,
1406 sizeof(indirect) );
Kevin E Martin0994e632001-01-05 22:57:55 +00001407
1408 DRM_DEBUG( "indirect: idx=%d s=%d e=%d d=%d\n",
1409 indirect.idx, indirect.start,
1410 indirect.end, indirect.discard );
1411
1412 if ( indirect.idx < 0 || indirect.idx >= dma->buf_count ) {
1413 DRM_ERROR( "buffer index %d (of %d max)\n",
1414 indirect.idx, dma->buf_count - 1 );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001415 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001416 }
1417
1418 buf = dma->buflist[indirect.idx];
Kevin E Martin0994e632001-01-05 22:57:55 +00001419
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001420 if ( buf->pid != DRM_CURRENTPID ) {
Kevin E Martin0994e632001-01-05 22:57:55 +00001421 DRM_ERROR( "process %d using buffer owned by %d\n",
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001422 DRM_CURRENTPID, buf->pid );
1423 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001424 }
1425 if ( buf->pending ) {
1426 DRM_ERROR( "sending pending buffer %d\n", indirect.idx );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001427 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001428 }
1429
1430 if ( indirect.start < buf->used ) {
1431 DRM_ERROR( "reusing indirect: start=0x%x actual=0x%x\n",
1432 indirect.start, buf->used );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001433 return DRM_ERR(EINVAL);
Kevin E Martin0994e632001-01-05 22:57:55 +00001434 }
1435
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001436 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1437 VB_AGE_TEST_WITH_RETURN( dev_priv );
Kevin E Martin0994e632001-01-05 22:57:55 +00001438
1439 buf->used = indirect.end;
Kevin E Martin0994e632001-01-05 22:57:55 +00001440
1441 /* Wait for the 3D stream to idle before the indirect buffer
1442 * containing 2D acceleration commands is processed.
1443 */
1444 BEGIN_RING( 2 );
1445
1446 RADEON_WAIT_UNTIL_3D_IDLE();
1447
1448 ADVANCE_RING();
1449
1450 /* Dispatch the indirect buffer full of commands from the
1451 * X server. This is insecure and is thus only available to
1452 * privileged clients.
1453 */
1454 radeon_cp_dispatch_indirect( dev, buf, indirect.start, indirect.end );
David Dawesab87c5d2002-02-14 02:00:26 +00001455 if (indirect.discard) {
Keith Whitwell2dcada32002-06-12 15:50:28 +00001456 radeon_cp_discard_buffer( dev, buf );
David Dawesab87c5d2002-02-14 02:00:26 +00001457 }
1458
1459
Keith Whitwell2dcada32002-06-12 15:50:28 +00001460 COMMIT_RING();
David Dawesab87c5d2002-02-14 02:00:26 +00001461 return 0;
1462}
1463
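/* Newer vertex ioctl: a single buffer may carry several primitives,
 * each referencing one of an array of state blocks.  State is
 * re-emitted whenever a primitive's stateidx differs from the
 * previous one.
 */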
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001464int radeon_cp_vertex2( DRM_IOCTL_ARGS )
David Dawesab87c5d2002-02-14 02:00:26 +00001465{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001466 DRM_DEVICE;
David Dawesab87c5d2002-02-14 02:00:26 +00001467 drm_radeon_private_t *dev_priv = dev->dev_private;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001468 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
David Dawesab87c5d2002-02-14 02:00:26 +00001469 drm_device_dma_t *dma = dev->dma;
1470 drm_buf_t *buf;
David Dawesab87c5d2002-02-14 02:00:26 +00001471 drm_radeon_vertex2_t vertex;
1472 int i;
1473 unsigned char laststate;
1474
1475 LOCK_TEST_WITH_RETURN( dev );
1476
1477 if ( !dev_priv ) {
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001478 DRM_ERROR( "%s called with no initialization\n", __func__ );
1479 return DRM_ERR(EINVAL);
David Dawesab87c5d2002-02-14 02:00:26 +00001480 }
1481
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001482 DRM_COPY_FROM_USER_IOCTL( vertex, (drm_radeon_vertex2_t *)data,
1483 sizeof(vertex) );
David Dawesab87c5d2002-02-14 02:00:26 +00001484
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001485 DRM_DEBUG( "pid=%d index=%d discard=%d\n",
1486 DRM_CURRENTPID,
1487 vertex.idx, vertex.discard );
David Dawesab87c5d2002-02-14 02:00:26 +00001488
1489 if ( vertex.idx < 0 || vertex.idx >= dma->buf_count ) {
1490 DRM_ERROR( "buffer index %d (of %d max)\n",
1491 vertex.idx, dma->buf_count - 1 );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001492 return DRM_ERR(EINVAL);
David Dawesab87c5d2002-02-14 02:00:26 +00001493 }
1494
1495 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1496 VB_AGE_TEST_WITH_RETURN( dev_priv );
1497
1498 buf = dma->buflist[vertex.idx];
David Dawesab87c5d2002-02-14 02:00:26 +00001499
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001500 if ( buf->pid != DRM_CURRENTPID ) {
David Dawesab87c5d2002-02-14 02:00:26 +00001501 DRM_ERROR( "process %d using buffer owned by %d\n",
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001502 DRM_CURRENTPID, buf->pid );
1503 return DRM_ERR(EINVAL);
David Dawesab87c5d2002-02-14 02:00:26 +00001504 }
1505
1506 if ( buf->pending ) {
1507 DRM_ERROR( "sending pending buffer %d\n", vertex.idx );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001508 return DRM_ERR(EINVAL);
David Dawesab87c5d2002-02-14 02:00:26 +00001509 }
Keith Whitwell2dcada32002-06-12 15:50:28 +00001510
1511 if (sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS)
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001512 return DRM_ERR(EINVAL);
David Dawesab87c5d2002-02-14 02:00:26 +00001513
1514 for (laststate = 0xff, i = 0 ; i < vertex.nr_prims ; i++) {
1515 drm_radeon_prim_t prim;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001516 drm_radeon_tcl_prim_t tclprim;
David Dawesab87c5d2002-02-14 02:00:26 +00001517
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001518 if ( DRM_COPY_FROM_USER( &prim, &vertex.prim[i], sizeof(prim) ) )
1519 return DRM_ERR(EFAULT);
David Dawesab87c5d2002-02-14 02:00:26 +00001520
David Dawesab87c5d2002-02-14 02:00:26 +00001521 if ( prim.stateidx != laststate ) {
1522 drm_radeon_state_t state;
1523
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001524 if ( DRM_COPY_FROM_USER( &state,
David Dawesab87c5d2002-02-14 02:00:26 +00001525 &vertex.state[prim.stateidx],
1526 sizeof(state) ) )
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001527 return DRM_ERR(EFAULT);
David Dawesab87c5d2002-02-14 02:00:26 +00001528
David Dawesab87c5d2002-02-14 02:00:26 +00001529 radeon_emit_state2( dev_priv, &state );
1530
1531 laststate = prim.stateidx;
1532 }
1533
Keith Whitwell2dcada32002-06-12 15:50:28 +00001534 tclprim.start = prim.start;
1535 tclprim.finish = prim.finish;
1536 tclprim.prim = prim.prim;
1537 tclprim.vc_format = prim.vc_format;
David Dawesab87c5d2002-02-14 02:00:26 +00001538
1539 if ( prim.prim & RADEON_PRIM_WALK_IND ) {
Keith Whitwell2dcada32002-06-12 15:50:28 +00001540 tclprim.offset = prim.numverts * 64;
1541 tclprim.numverts = RADEON_MAX_VB_VERTS; /* duh */
1542
1543 radeon_cp_dispatch_indices( dev, buf, &tclprim,
1544 sarea_priv->boxes,
1545 sarea_priv->nbox);
David Dawesab87c5d2002-02-14 02:00:26 +00001546 } else {
Keith Whitwell2dcada32002-06-12 15:50:28 +00001547 tclprim.numverts = prim.numverts;
1548 tclprim.offset = 0; /* not used */
1549
1550 radeon_cp_dispatch_vertex( dev, buf, &tclprim,
1551 sarea_priv->boxes,
1552 sarea_priv->nbox);
David Dawesab87c5d2002-02-14 02:00:26 +00001553 }
Keith Whitwell2dcada32002-06-12 15:50:28 +00001554
1555 if (sarea_priv->nbox == 1)
1556 sarea_priv->nbox = 0;
David Dawesab87c5d2002-02-14 02:00:26 +00001557 }
1558
1559 if ( vertex.discard ) {
1560 radeon_cp_discard_buffer( dev, buf );
1561 }
Kevin E Martin0994e632001-01-05 22:57:55 +00001562
Keith Whitwell2dcada32002-06-12 15:50:28 +00001563 COMMIT_RING();
Kevin E Martin0994e632001-01-05 22:57:55 +00001564 return 0;
1565}
Keith Whitwell2dcada32002-06-12 15:50:28 +00001566
1567
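/* Emit a RADEON_CMD_PACKET chunk: look up the register range and
 * dword count for the given packet id in the packet[] table and copy
 * that many dwords from the user command buffer into a PACKET0 write.
 */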
1568static int radeon_emit_packets(
1569 drm_radeon_private_t *dev_priv,
1570 drm_radeon_cmd_header_t header,
1571 drm_radeon_cmd_buffer_t *cmdbuf )
1572{
	int id = (int)header.packet.packet_id;
	int sz, reg;
	int *data = (int *)cmdbuf->buf;
	RING_LOCALS;

	/* The packet id comes from user space; validate it before indexing
	 * packet[].  RADEON_MAX_STATE_PACKETS is assumed to be the table
	 * size defined alongside the RADEON_EMIT_* ids in radeon_drm.h.
	 */
	if ( id >= RADEON_MAX_STATE_PACKETS )
		return DRM_ERR(EINVAL);

	sz = packet[id].len;
	reg = packet[id].start;

	if (sz * sizeof(int) > cmdbuf->bufsz)
		return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001581
1582 BEGIN_RING(sz+1);
1583 OUT_RING( CP_PACKET0( reg, (sz-1) ) );
1584 OUT_RING_USER_TABLE( data, sz );
1585 ADVANCE_RING();
1586
1587 cmdbuf->buf += sz * sizeof(int);
1588 cmdbuf->bufsz -= sz * sizeof(int);
1589 return 0;
1590}
1591
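/* Emit a RADEON_CMD_SCALARS chunk: load `count' dwords of scalar
 * state through the SCALAR_INDX/SCALAR_DATA register pair.
 */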
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001592static __inline__ int radeon_emit_scalars(
Keith Whitwell2dcada32002-06-12 15:50:28 +00001593 drm_radeon_private_t *dev_priv,
1594 drm_radeon_cmd_header_t header,
1595 drm_radeon_cmd_buffer_t *cmdbuf )
1596{
1597 int sz = header.scalars.count;
1598 int *data = (int *)cmdbuf->buf;
1599 int start = header.scalars.offset;
1600 int stride = header.scalars.stride;
1601 RING_LOCALS;

	/* Bounds-check against the verified command buffer, as
	 * radeon_emit_packets() does: sz comes from a user-supplied header.
	 */
	if (sz * sizeof(int) > cmdbuf->bufsz)
		return DRM_ERR(EINVAL);

	BEGIN_RING( 3+sz );
1604 OUT_RING( CP_PACKET0( RADEON_SE_TCL_SCALAR_INDX_REG, 0 ) );
1605 OUT_RING( start | (stride << RADEON_SCAL_INDX_DWORD_STRIDE_SHIFT));
1606 OUT_RING( CP_PACKET0_TABLE( RADEON_SE_TCL_SCALAR_DATA_REG, sz-1 ) );
1607 OUT_RING_USER_TABLE( data, sz );
1608 ADVANCE_RING();
1609 cmdbuf->buf += sz * sizeof(int);
1610 cmdbuf->bufsz -= sz * sizeof(int);
1611 return 0;
1612}
1613
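/* Emit a RADEON_CMD_VECTORS chunk: load `count' dwords of vector
 * state through the VECTOR_INDX/VECTOR_DATA register pair.
 */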
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001614static __inline__ int radeon_emit_vectors(
Keith Whitwell2dcada32002-06-12 15:50:28 +00001615 drm_radeon_private_t *dev_priv,
1616 drm_radeon_cmd_header_t header,
1617 drm_radeon_cmd_buffer_t *cmdbuf )
1618{
1619 int sz = header.vectors.count;
1620 int *data = (int *)cmdbuf->buf;
1621 int start = header.vectors.offset;
1622 int stride = header.vectors.stride;
1623 RING_LOCALS;
1624

	/* Same bounds check as radeon_emit_scalars(): don't copy more user
	 * data than the verified command buffer holds.
	 */
	if (sz * sizeof(int) > cmdbuf->bufsz)
		return DRM_ERR(EINVAL);

	BEGIN_RING( 3+sz );
1627 OUT_RING( start | (stride << RADEON_VEC_INDX_OCTWORD_STRIDE_SHIFT));
1628 OUT_RING( CP_PACKET0_TABLE( RADEON_SE_TCL_VECTOR_DATA_REG, (sz-1) ) );
1629 OUT_RING_USER_TABLE( data, sz );
1630 ADVANCE_RING();
1631
1632 cmdbuf->buf += sz * sizeof(int);
1633 cmdbuf->bufsz -= sz * sizeof(int);
1634 return 0;
1635}
1636
1637
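/* Emit a raw type-3 packet taken from the user command buffer, after
 * checking that the length encoded in its header fits within the
 * remaining buffer.
 */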
1638static int radeon_emit_packet3( drm_device_t *dev,
1639 drm_radeon_cmd_buffer_t *cmdbuf )
1640{
1641 drm_radeon_private_t *dev_priv = dev->dev_private;
1642 int cmdsz, tmp;
1643 int *cmd = (int *)cmdbuf->buf;
1644 RING_LOCALS;
1645
1646
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001647 DRM_DEBUG("\n");
Keith Whitwell2dcada32002-06-12 15:50:28 +00001648
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001649 if (DRM_GET_USER_UNCHECKED( tmp, &cmd[0]))
1650 return DRM_ERR(EFAULT);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001651
1652 cmdsz = 2 + ((tmp & RADEON_CP_PACKET_COUNT_MASK) >> 16);
1653
1654 if ((tmp & 0xc0000000) != RADEON_CP_PACKET3 ||
1655 cmdsz * 4 > cmdbuf->bufsz)
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001656 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001657
1658 BEGIN_RING( cmdsz );
1659 OUT_RING_USER_TABLE( cmd, cmdsz );
1660 ADVANCE_RING();
1661
1662 cmdbuf->buf += cmdsz * 4;
1663 cmdbuf->bufsz -= cmdsz * 4;
1664 return 0;
1665}
1666
1667
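/* Emit a type-3 packet once per cliprect, programming the scissor for
 * each box first.  When the ioctl was passed no cliprects at all
 * (orig_nbox == 0) the packet is dropped entirely.
 */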
1668static int radeon_emit_packet3_cliprect( drm_device_t *dev,
Keith Whitwell33d57132002-08-12 07:26:00 +00001669 drm_radeon_cmd_buffer_t *cmdbuf,
1670 int orig_nbox )
Keith Whitwell2dcada32002-06-12 15:50:28 +00001671{
1672 drm_radeon_private_t *dev_priv = dev->dev_private;
1673 drm_clip_rect_t box;
1674 int cmdsz, tmp;
1675 int *cmd = (int *)cmdbuf->buf;
1676 drm_clip_rect_t *boxes = cmdbuf->boxes;
1677 int i = 0;
1678 RING_LOCALS;
1679
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001680 DRM_DEBUG("\n");
Keith Whitwell2dcada32002-06-12 15:50:28 +00001681
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001682 if (DRM_GET_USER_UNCHECKED( tmp, &cmd[0]))
1683 return DRM_ERR(EFAULT);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001684
1685 cmdsz = 2 + ((tmp & RADEON_CP_PACKET_COUNT_MASK) >> 16);
1686
1687 if ((tmp & 0xc0000000) != RADEON_CP_PACKET3 ||
1688 cmdsz * 4 > cmdbuf->bufsz)
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001689 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001690
Keith Whitwell33d57132002-08-12 07:26:00 +00001691 if (!orig_nbox)
1692 goto out;
1693
Keith Whitwell2dcada32002-06-12 15:50:28 +00001694 do {
1695 if ( i < cmdbuf->nbox ) {
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001696 if (DRM_COPY_FROM_USER_UNCHECKED( &box, &boxes[i], sizeof(box) ))
1697 return DRM_ERR(EFAULT);
Tim Smith8fa8db12002-07-17 08:30:36 +00001698 /* FIXME The second and subsequent times round this loop, send a
1699 * WAIT_UNTIL_3D_IDLE before calling emit_clip_rect(). This
1700 * fixes a lockup on fast machines when sending several
1701 * cliprects with a cmdbuf, as when waving a 2D window over
1702 * a 3D window. Something in the commands from user space
1703 * seems to hang the card when they're sent several times
1704 * in a row. That would be the correct place to fix it but
1705 * this works around it until I can figure that out - Tim Smith */
1706 if ( i ) {
1707 BEGIN_RING( 2 );
1708 RADEON_WAIT_UNTIL_3D_IDLE();
1709 ADVANCE_RING();
1710 }
Keith Whitwell2dcada32002-06-12 15:50:28 +00001711 radeon_emit_clip_rect( dev_priv, &box );
1712 }
1713
1714 BEGIN_RING( cmdsz );
1715 OUT_RING_USER_TABLE( cmd, cmdsz );
1716 ADVANCE_RING();
1717
1718 } while ( ++i < cmdbuf->nbox );
Keith Whitwell2dcada32002-06-12 15:50:28 +00001719 if (cmdbuf->nbox == 1)
1720 cmdbuf->nbox = 0;
1721
Keith Whitwell33d57132002-08-12 07:26:00 +00001722 out:
Keith Whitwell2dcada32002-06-12 15:50:28 +00001723 cmdbuf->buf += cmdsz * 4;
1724 cmdbuf->bufsz -= cmdsz * 4;
1725 return 0;
1726}
1727
1728
1729
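/* Main command-stream ioctl: walk the user buffer one
 * drm_radeon_cmd_header_t at a time and hand each chunk to the
 * matching emit routine above.
 *
 * Illustrative sketch only (the real packing lives in the user-space
 * DDX/Mesa code, and packet_len below stands for whatever dword count
 * the packet[] table lists for the chosen id):
 *
 *	drm_radeon_cmd_header_t h;
 *	h.i = 0;
 *	h.header.cmd_type = RADEON_CMD_PACKET;
 *	h.packet.packet_id = RADEON_EMIT_PP_MISC;
 *	memcpy( p, &h, sizeof(h) );			p += sizeof(h);
 *	memcpy( p, regs, packet_len * sizeof(int) );	p += packet_len * sizeof(int);
 */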
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001730int radeon_cp_cmdbuf( DRM_IOCTL_ARGS )
Keith Whitwell2dcada32002-06-12 15:50:28 +00001731{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001732 DRM_DEVICE;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001733 drm_radeon_private_t *dev_priv = dev->dev_private;
1734 drm_device_dma_t *dma = dev->dma;
1735 drm_buf_t *buf = 0;
1736 int idx;
1737 drm_radeon_cmd_buffer_t cmdbuf;
1738 drm_radeon_cmd_header_t header;
Keith Whitwell33d57132002-08-12 07:26:00 +00001739 int orig_nbox;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001740
1741 LOCK_TEST_WITH_RETURN( dev );
1742
1743 if ( !dev_priv ) {
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001744 DRM_ERROR( "%s called with no initialization\n", __func__ );
1745 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001746 }
1747
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001748 DRM_COPY_FROM_USER_IOCTL( cmdbuf, (drm_radeon_cmd_buffer_t *)data,
1749 sizeof(cmdbuf) );
Keith Whitwell2dcada32002-06-12 15:50:28 +00001750
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001751 DRM_DEBUG( "pid=%d\n", DRM_CURRENTPID );
Keith Whitwell2dcada32002-06-12 15:50:28 +00001752 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1753 VB_AGE_TEST_WITH_RETURN( dev_priv );
1754
1755
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001756 if (DRM_VERIFYAREA_READ( cmdbuf.buf, cmdbuf.bufsz ))
1757 return DRM_ERR(EFAULT);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001758
1759 if (cmdbuf.nbox &&
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001760 DRM_VERIFYAREA_READ(cmdbuf.boxes,
Keith Whitwell2dcada32002-06-12 15:50:28 +00001761 cmdbuf.nbox * sizeof(drm_clip_rect_t)))
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001762 return DRM_ERR(EFAULT);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001763
Keith Whitwell33d57132002-08-12 07:26:00 +00001764 orig_nbox = cmdbuf.nbox;
1765
Keith Whitwell2dcada32002-06-12 15:50:28 +00001766 while ( cmdbuf.bufsz >= sizeof(header) ) {
1767
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001768 if (DRM_GET_USER_UNCHECKED( header.i, (int *)cmdbuf.buf )) {
			DRM_ERROR( "can't read cmd header at %p\n", cmdbuf.buf );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001770 return DRM_ERR(EFAULT);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001771 }
1772
1773 cmdbuf.buf += sizeof(header);
1774 cmdbuf.bufsz -= sizeof(header);
1775
1776 switch (header.header.cmd_type) {
1777 case RADEON_CMD_PACKET:
1778 if (radeon_emit_packets( dev_priv, header, &cmdbuf )) {
1779 DRM_ERROR("radeon_emit_packets failed\n");
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001780 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001781 }
1782 break;
1783
1784 case RADEON_CMD_SCALARS:
1785 if (radeon_emit_scalars( dev_priv, header, &cmdbuf )) {
1786 DRM_ERROR("radeon_emit_scalars failed\n");
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001787 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001788 }
1789 break;
1790
1791 case RADEON_CMD_VECTORS:
1792 if (radeon_emit_vectors( dev_priv, header, &cmdbuf )) {
1793 DRM_ERROR("radeon_emit_vectors failed\n");
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001794 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001795 }
1796 break;
1797
1798 case RADEON_CMD_DMA_DISCARD:
1799 idx = header.dma.buf_idx;
1800 if ( idx < 0 || idx >= dma->buf_count ) {
1801 DRM_ERROR( "buffer index %d (of %d max)\n",
1802 idx, dma->buf_count - 1 );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001803 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001804 }
1805
1806 buf = dma->buflist[idx];
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001807 if ( buf->pid != DRM_CURRENTPID || buf->pending ) {
Keith Whitwell2dcada32002-06-12 15:50:28 +00001808 DRM_ERROR( "bad buffer\n" );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001809 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001810 }
1811
1812 radeon_cp_discard_buffer( dev, buf );
1813 break;
1814
1815 case RADEON_CMD_PACKET3:
1816 if (radeon_emit_packet3( dev, &cmdbuf )) {
1817 DRM_ERROR("radeon_emit_packet3 failed\n");
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001818 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001819 }
1820 break;
1821
1822 case RADEON_CMD_PACKET3_CLIP:
Keith Whitwell33d57132002-08-12 07:26:00 +00001823 if (radeon_emit_packet3_cliprect( dev, &cmdbuf, orig_nbox )) {
Keith Whitwell2dcada32002-06-12 15:50:28 +00001824 DRM_ERROR("radeon_emit_packet3_clip failed\n");
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001825 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001826 }
1827 break;
1828
1829 default:
1830 DRM_ERROR("bad cmd_type %d at %p\n",
1831 header.header.cmd_type,
1832 cmdbuf.buf - sizeof(header));
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001833 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001834 }
1835 }
1836
1837
1838 COMMIT_RING();
1839 return 0;
1840}
1841
1842
1843
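/* Query driver state (AGP buffer offset and the last frame, dispatch
 * and clear values from the scratch registers) and copy the result
 * back to user space.
 *
 * Illustrative user-space usage (assuming the usual drm ioctl path):
 *
 *	int frame;
 *	drm_radeon_getparam_t gp;
 *	gp.param = RADEON_PARAM_LAST_FRAME;
 *	gp.value = &frame;
 *	ioctl( fd, DRM_IOCTL_RADEON_GETPARAM, &gp );
 */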
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001844int radeon_cp_getparam( DRM_IOCTL_ARGS )
Keith Whitwell2dcada32002-06-12 15:50:28 +00001845{
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001846 DRM_DEVICE;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001847 drm_radeon_private_t *dev_priv = dev->dev_private;
1848 drm_radeon_getparam_t param;
1849 int value;
1850
1851 if ( !dev_priv ) {
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001852 DRM_ERROR( "%s called with no initialization\n", __func__ );
1853 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001854 }
1855
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001856 DRM_COPY_FROM_USER_IOCTL( param, (drm_radeon_getparam_t *)data,
1857 sizeof(param) );
Keith Whitwell2dcada32002-06-12 15:50:28 +00001858
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001859 DRM_DEBUG( "pid=%d\n", DRM_CURRENTPID );
Keith Whitwell2dcada32002-06-12 15:50:28 +00001860
1861 switch( param.param ) {
1862 case RADEON_PARAM_AGP_BUFFER_OFFSET:
1863 value = dev_priv->agp_buffers_offset;
1864 break;
Michel Daenzerfd86ac92002-07-11 20:31:12 +00001865 case RADEON_PARAM_LAST_FRAME:
Michel Daenzerd0ac4e52002-08-11 15:56:44 +00001866 value = GET_SCRATCH( 0 );
Michel Daenzerfd86ac92002-07-11 20:31:12 +00001867 break;
1868 case RADEON_PARAM_LAST_DISPATCH:
Michel Daenzerd0ac4e52002-08-11 15:56:44 +00001869 value = GET_SCRATCH( 1 );
Michel Daenzerfd86ac92002-07-11 20:31:12 +00001870 break;
1871 case RADEON_PARAM_LAST_CLEAR:
Michel Daenzerd0ac4e52002-08-11 15:56:44 +00001872 value = GET_SCRATCH( 2 );
Michel Daenzerfd86ac92002-07-11 20:31:12 +00001873 break;
Keith Whitwell2dcada32002-06-12 15:50:28 +00001874 default:
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001875 return DRM_ERR(EINVAL);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001876 }
1877
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001878 if ( DRM_COPY_TO_USER( param.value, &value, sizeof(int) ) ) {
Keith Whitwell2dcada32002-06-12 15:50:28 +00001879 DRM_ERROR( "copy_to_user\n" );
Alan Hourihane74ef13f2002-07-05 08:31:11 +00001880 return DRM_ERR(EFAULT);
Keith Whitwell2dcada32002-06-12 15:50:28 +00001881 }
1882
1883 return 0;
1884}