/* radeon_state.c -- State support for Radeon -*- linux-c -*-
 *
 * Copyright 2000 VA Linux Systems, Inc., Fremont, California.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Gareth Hughes <gareth@valinux.com>
 *    Kevin E. Martin <martin@valinux.com>
 */
29
30#define __NO_VERSION__
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +000031#include "radeon.h"
Kevin E Martin0994e632001-01-05 22:57:55 +000032#include "drmP.h"
33#include "radeon_drv.h"
34#include "drm.h"
35#include <linux/delay.h>
36
37
38/* ================================================================
39 * CP hardware state programming functions
40 */
41
/* Program the hardware scissor (RE_TOP_LEFT / RE_WIDTH_HEIGHT) from a
 * single cliprect.  Note that the width/height register takes inclusive
 * maximum coordinates, hence the "- 1" on x2/y2.
 */
static inline void radeon_emit_clip_rect( drm_radeon_private_t *dev_priv,
					  drm_clip_rect_t *box )
{
	RING_LOCALS;

	DRM_DEBUG( " box: x1=%d y1=%d x2=%d y2=%d\n",
		   box->x1, box->y1, box->x2, box->y2 );

	BEGIN_RING( 4 );

	OUT_RING( CP_PACKET0( RADEON_RE_TOP_LEFT, 0 ) );
	OUT_RING( (box->y1 << 16) | box->x1 );

	OUT_RING( CP_PACKET0( RADEON_RE_WIDTH_HEIGHT, 0 ) );
	OUT_RING( ((box->y2 - 1) << 16) | (box->x2 - 1) );

	ADVANCE_RING();
}
60
/* Emit the core 3D context registers (PP_MISC through RB3D_COLORPITCH)
 * from the context state stored in the shared memory area (SAREA).
 * Three register ranges are written: 7 regs from PP_MISC, 3 regs from
 * PP_CNTL, and RB3D_COLORPITCH alone — 11 data dwords + 3 packet
 * headers = the 14 dwords reserved by BEGIN_RING.
 */
static inline void radeon_emit_context( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 14 );

	OUT_RING( CP_PACKET0( RADEON_PP_MISC, 6 ) );
	OUT_RING( ctx->pp_misc );
	OUT_RING( ctx->pp_fog_color );
	OUT_RING( ctx->re_solid_color );
	OUT_RING( ctx->rb3d_blendcntl );
	OUT_RING( ctx->rb3d_depthoffset );
	OUT_RING( ctx->rb3d_depthpitch );
	OUT_RING( ctx->rb3d_zstencilcntl );

	OUT_RING( CP_PACKET0( RADEON_PP_CNTL, 2 ) );
	OUT_RING( ctx->pp_cntl );
	OUT_RING( ctx->rb3d_cntl );
	OUT_RING( ctx->rb3d_coloroffset );

	OUT_RING( CP_PACKET0( RADEON_RB3D_COLORPITCH, 0 ) );
	OUT_RING( ctx->rb3d_colorpitch );

	ADVANCE_RING();
}
89
90static inline void radeon_emit_vertfmt( drm_radeon_private_t *dev_priv )
91{
92 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
93 drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
94 RING_LOCALS;
95 DRM_DEBUG( " %s\n", __FUNCTION__ );
96
97 BEGIN_RING( 2 );
98
99 OUT_RING( CP_PACKET0( RADEON_SE_COORD_FMT, 0 ) );
100 OUT_RING( ctx->se_coord_fmt );
101
102 ADVANCE_RING();
103}
104
/* Emit the line-rendering state: the stipple pattern/state pair
 * (RE_LINE_PATTERN, RE_LINE_STATE) and the line width register.
 */
static inline void radeon_emit_line( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 5 );

	OUT_RING( CP_PACKET0( RADEON_RE_LINE_PATTERN, 1 ) );
	OUT_RING( ctx->re_line_pattern );
	OUT_RING( ctx->re_line_state );

	OUT_RING( CP_PACKET0( RADEON_SE_LINE_WIDTH, 0 ) );
	OUT_RING( ctx->se_line_width );

	ADVANCE_RING();
}
123
/* Emit the bump-mapping state: the luminance matrix register and the
 * two rotation matrix registers (PP_ROT_MATRIX_0/1).
 */
static inline void radeon_emit_bumpmap( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 5 );

	OUT_RING( CP_PACKET0( RADEON_PP_LUM_MATRIX, 0 ) );
	OUT_RING( ctx->pp_lum_matrix );

	OUT_RING( CP_PACKET0( RADEON_PP_ROT_MATRIX_0, 1 ) );
	OUT_RING( ctx->pp_rot_matrix_0 );
	OUT_RING( ctx->pp_rot_matrix_1 );

	ADVANCE_RING();
}
142
/* Emit the write-mask state: stencil ref/mask, ROP control and the
 * color plane mask as one 3-register range from RB3D_STENCILREFMASK.
 */
static inline void radeon_emit_masks( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 4 );

	OUT_RING( CP_PACKET0( RADEON_RB3D_STENCILREFMASK, 2 ) );
	OUT_RING( ctx->rb3d_stencilrefmask );
	OUT_RING( ctx->rb3d_ropcntl );
	OUT_RING( ctx->rb3d_planemask );

	ADVANCE_RING();
}
159
/* Emit the viewport transform: the six scale/offset registers
 * (x/y/z each) as one range starting at SE_VPORT_XSCALE.
 */
static inline void radeon_emit_viewport( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 7 );

	OUT_RING( CP_PACKET0( RADEON_SE_VPORT_XSCALE, 5 ) );
	OUT_RING( ctx->se_vport_xscale );
	OUT_RING( ctx->se_vport_xoffset );
	OUT_RING( ctx->se_vport_yscale );
	OUT_RING( ctx->se_vport_yoffset );
	OUT_RING( ctx->se_vport_zscale );
	OUT_RING( ctx->se_vport_zoffset );

	ADVANCE_RING();
}
179
180static inline void radeon_emit_setup( drm_radeon_private_t *dev_priv )
181{
182 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
183 drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
184 RING_LOCALS;
185 DRM_DEBUG( " %s\n", __FUNCTION__ );
186
187 BEGIN_RING( 4 );
188
189 OUT_RING( CP_PACKET0( RADEON_SE_CNTL, 0 ) );
190 OUT_RING( ctx->se_cntl );
191 OUT_RING( CP_PACKET0( RADEON_SE_CNTL_STATUS, 0 ) );
192 OUT_RING( ctx->se_cntl_status );
193
194 ADVANCE_RING();
195}
196
197static inline void radeon_emit_tcl( drm_radeon_private_t *dev_priv )
198{
199#ifdef TCL_ENABLE
200 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
201 drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
202 RING_LOCALS;
203 DRM_DEBUG( " %s\n", __FUNCTION__ );
204
205 BEGIN_RING( 29 );
206
207 OUT_RING( CP_PACKET0( RADEON_SE_TCL_MATERIAL_EMMISSIVE_RED, 27 ) );
208 OUT_RING( ctx->se_tcl_material_emmissive.red );
209 OUT_RING( ctx->se_tcl_material_emmissive.green );
210 OUT_RING( ctx->se_tcl_material_emmissive.blue );
211 OUT_RING( ctx->se_tcl_material_emmissive.alpha );
212 OUT_RING( ctx->se_tcl_material_ambient.red );
213 OUT_RING( ctx->se_tcl_material_ambient.green );
214 OUT_RING( ctx->se_tcl_material_ambient.blue );
215 OUT_RING( ctx->se_tcl_material_ambient.alpha );
216 OUT_RING( ctx->se_tcl_material_diffuse.red );
217 OUT_RING( ctx->se_tcl_material_diffuse.green );
218 OUT_RING( ctx->se_tcl_material_diffuse.blue );
219 OUT_RING( ctx->se_tcl_material_diffuse.alpha );
220 OUT_RING( ctx->se_tcl_material_specular.red );
221 OUT_RING( ctx->se_tcl_material_specular.green );
222 OUT_RING( ctx->se_tcl_material_specular.blue );
223 OUT_RING( ctx->se_tcl_material_specular.alpha );
224 OUT_RING( ctx->se_tcl_shininess );
225 OUT_RING( ctx->se_tcl_output_vtx_fmt );
226 OUT_RING( ctx->se_tcl_output_vtx_sel );
227 OUT_RING( ctx->se_tcl_matrix_select_0 );
228 OUT_RING( ctx->se_tcl_matrix_select_1 );
229 OUT_RING( ctx->se_tcl_ucp_vert_blend_ctl );
230 OUT_RING( ctx->se_tcl_texture_proc_ctl );
231 OUT_RING( ctx->se_tcl_light_model_ctl );
232 for ( i = 0 ; i < 4 ; i++ ) {
233 OUT_RING( ctx->se_tcl_per_light_ctl[i] );
234 }
235
236 ADVANCE_RING();
237#else
238 DRM_ERROR( "TCL not enabled!\n" );
239#endif
240}
241
242static inline void radeon_emit_misc( drm_radeon_private_t *dev_priv )
243{
244 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
245 drm_radeon_context_regs_t *ctx = &sarea_priv->context_state;
246 RING_LOCALS;
247 DRM_DEBUG( " %s\n", __FUNCTION__ );
248
249 BEGIN_RING( 2 );
250
251 OUT_RING( CP_PACKET0( RADEON_RE_MISC, 0 ) );
252 OUT_RING( ctx->re_misc );
253
254 ADVANCE_RING();
255}
256
/* Emit the state for texture unit 0: filter, format, offset, blend and
 * factor registers as one range from PP_TXFILTER_0, plus the unit's
 * border color.
 */
static inline void radeon_emit_tex0( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_texture_regs_t *tex = &sarea_priv->tex_state[0];
	RING_LOCALS;
	DRM_DEBUG( " %s: offset=0x%x\n", __FUNCTION__, tex->pp_txoffset );

	BEGIN_RING( 9 );

	OUT_RING( CP_PACKET0( RADEON_PP_TXFILTER_0, 5 ) );
	OUT_RING( tex->pp_txfilter );
	OUT_RING( tex->pp_txformat );
	OUT_RING( tex->pp_txoffset );
	OUT_RING( tex->pp_txcblend );
	OUT_RING( tex->pp_txablend );
	OUT_RING( tex->pp_tfactor );

	OUT_RING( CP_PACKET0( RADEON_PP_BORDER_COLOR_0, 0 ) );
	OUT_RING( tex->pp_border_color );

	ADVANCE_RING();
}
279
/* Emit the state for texture unit 1; identical layout to unit 0 but
 * targeting the _1 register block.
 */
static inline void radeon_emit_tex1( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_texture_regs_t *tex = &sarea_priv->tex_state[1];
	RING_LOCALS;
	DRM_DEBUG( " %s: offset=0x%x\n", __FUNCTION__, tex->pp_txoffset );

	BEGIN_RING( 9 );

	OUT_RING( CP_PACKET0( RADEON_PP_TXFILTER_1, 5 ) );
	OUT_RING( tex->pp_txfilter );
	OUT_RING( tex->pp_txformat );
	OUT_RING( tex->pp_txoffset );
	OUT_RING( tex->pp_txcblend );
	OUT_RING( tex->pp_txablend );
	OUT_RING( tex->pp_tfactor );

	OUT_RING( CP_PACKET0( RADEON_PP_BORDER_COLOR_1, 0 ) );
	OUT_RING( tex->pp_border_color );

	ADVANCE_RING();
}
302
/* Emit the state for texture unit 2; identical layout to unit 0 but
 * targeting the _2 register block.  NOTE(review): currently never
 * called — the TEX2 case in radeon_emit_state() is compiled out.
 */
static inline void radeon_emit_tex2( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	drm_radeon_texture_regs_t *tex = &sarea_priv->tex_state[2];
	RING_LOCALS;
	DRM_DEBUG( " %s\n", __FUNCTION__ );

	BEGIN_RING( 9 );

	OUT_RING( CP_PACKET0( RADEON_PP_TXFILTER_2, 5 ) );
	OUT_RING( tex->pp_txfilter );
	OUT_RING( tex->pp_txformat );
	OUT_RING( tex->pp_txoffset );
	OUT_RING( tex->pp_txcblend );
	OUT_RING( tex->pp_txablend );
	OUT_RING( tex->pp_tfactor );

	OUT_RING( CP_PACKET0( RADEON_PP_BORDER_COLOR_2, 0 ) );
	OUT_RING( tex->pp_border_color );

	ADVANCE_RING();
}
325
/* Flush all dirty hardware state to the ring.  For each dirty flag in
 * the SAREA, emit the corresponding register group and clear the flag.
 * The texture-image and quiescence flags are cleared unconditionally
 * at the end — they are handled elsewhere, not by register emits here.
 */
static inline void radeon_emit_state( drm_radeon_private_t *dev_priv )
{
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	unsigned int dirty = sarea_priv->dirty;

	DRM_DEBUG( "%s: dirty=0x%08x\n", __FUNCTION__, dirty );

	if ( dirty & RADEON_UPLOAD_CONTEXT ) {
		radeon_emit_context( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_CONTEXT;
	}

	if ( dirty & RADEON_UPLOAD_VERTFMT ) {
		radeon_emit_vertfmt( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_VERTFMT;
	}

	if ( dirty & RADEON_UPLOAD_LINE ) {
		radeon_emit_line( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_LINE;
	}

	if ( dirty & RADEON_UPLOAD_BUMPMAP ) {
		radeon_emit_bumpmap( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_BUMPMAP;
	}

	if ( dirty & RADEON_UPLOAD_MASKS ) {
		radeon_emit_masks( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_MASKS;
	}

	if ( dirty & RADEON_UPLOAD_VIEWPORT ) {
		radeon_emit_viewport( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_VIEWPORT;
	}

	if ( dirty & RADEON_UPLOAD_SETUP ) {
		radeon_emit_setup( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_SETUP;
	}

	/* TCL state is only emitted on TCL-enabled builds, but the dirty
	 * bit is cleared either way so it does not stick forever.
	 */
	if ( dirty & RADEON_UPLOAD_TCL ) {
#ifdef TCL_ENABLE
		radeon_emit_tcl( dev_priv );
#endif
		sarea_priv->dirty &= ~RADEON_UPLOAD_TCL;
	}

	if ( dirty & RADEON_UPLOAD_MISC ) {
		radeon_emit_misc( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_MISC;
	}

	if ( dirty & RADEON_UPLOAD_TEX0 ) {
		radeon_emit_tex0( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_TEX0;
	}

	if ( dirty & RADEON_UPLOAD_TEX1 ) {
		radeon_emit_tex1( dev_priv );
		sarea_priv->dirty &= ~RADEON_UPLOAD_TEX1;
	}

	/* Texture unit 2 emission is compiled out; only the flag is
	 * cleared.
	 */
	if ( dirty & RADEON_UPLOAD_TEX2 ) {
#if 0
		radeon_emit_tex2( dev_priv );
#endif
		sarea_priv->dirty &= ~RADEON_UPLOAD_TEX2;
	}

	sarea_priv->dirty &= ~(RADEON_UPLOAD_TEX0IMAGES |
			       RADEON_UPLOAD_TEX1IMAGES |
			       RADEON_UPLOAD_TEX2IMAGES |
			       RADEON_REQUIRE_QUIESCENCE);
}
402
403
404#if RADEON_PERFORMANCE_BOXES
405/* ================================================================
406 * Performance monitoring functions
407 */
408
/* Fill a w x h box at (x,y) in the back buffer with the given RGB
 * color via a 2D solid fill, converting the color to the current
 * framebuffer format.  Used only for the performance-box overlay.
 */
static void radeon_clear_box( drm_radeon_private_t *dev_priv,
			      int x, int y, int w, int h,
			      int r, int g, int b )
{
	u32 pitch, offset;
	u32 color;
	RING_LOCALS;

	/* Pack the 8-bit channels for the destination pixel format. */
	switch ( dev_priv->color_fmt ) {
	case RADEON_COLOR_FORMAT_RGB565:
		color = (((r & 0xf8) << 8) |
			 ((g & 0xfc) << 3) |
			 ((b & 0xf8) >> 3));
		break;
	case RADEON_COLOR_FORMAT_ARGB8888:
	default:
		color = (((0xff) << 24) | (r << 16) | (g << 8) | b);
		break;
	}

	offset = dev_priv->back_offset;
	pitch = dev_priv->back_pitch >> 3;

	BEGIN_RING( 6 );

	OUT_RING( CP_PACKET3( RADEON_CNTL_PAINT_MULTI, 4 ) );
	OUT_RING( RADEON_GMC_DST_PITCH_OFFSET_CNTL |
		  RADEON_GMC_BRUSH_SOLID_COLOR |
		  (dev_priv->color_fmt << 8) |
		  RADEON_GMC_SRC_DATATYPE_COLOR |
		  RADEON_ROP3_P |
		  RADEON_GMC_CLR_CMP_CNTL_DIS );

	/* Packed pitch/offset word, then brush color, then the rect. */
	OUT_RING( (pitch << 22) | (offset >> 5) );
	OUT_RING( color );

	OUT_RING( (x << 16) | y );
	OUT_RING( (w << 16) | h );

	ADVANCE_RING();
}
450
451static void radeon_cp_performance_boxes( drm_radeon_private_t *dev_priv )
452{
453 if ( atomic_read( &dev_priv->idle_count ) == 0 ) {
454 radeon_clear_box( dev_priv, 64, 4, 8, 8, 0, 255, 0 );
455 } else {
456 atomic_set( &dev_priv->idle_count, 0 );
457 }
458}
459
460#endif
461
462
463/* ================================================================
464 * CP command dispatch functions
465 */
466
/* Debug helper: log the given dirty-flag mask as a human-readable list
 * of state-group names.
 */
static void radeon_print_dirty( const char *msg, unsigned int flags )
{
	DRM_DEBUG( "%s: (0x%x) %s%s%s%s%s%s%s%s%s%s%s%s%s%s\n",
		   msg,
		   flags,
		   (flags & RADEON_UPLOAD_CONTEXT) ? "context, " : "",
		   (flags & RADEON_UPLOAD_VERTFMT) ? "vertfmt, " : "",
		   (flags & RADEON_UPLOAD_LINE) ? "line, " : "",
		   (flags & RADEON_UPLOAD_BUMPMAP) ? "bumpmap, " : "",
		   (flags & RADEON_UPLOAD_MASKS) ? "masks, " : "",
		   (flags & RADEON_UPLOAD_VIEWPORT) ? "viewport, " : "",
		   (flags & RADEON_UPLOAD_SETUP) ? "setup, " : "",
		   (flags & RADEON_UPLOAD_TCL) ? "tcl, " : "",
		   (flags & RADEON_UPLOAD_MISC) ? "misc, " : "",
		   (flags & RADEON_UPLOAD_TEX0) ? "tex0, " : "",
		   (flags & RADEON_UPLOAD_TEX1) ? "tex1, " : "",
		   (flags & RADEON_UPLOAD_TEX2) ? "tex2, " : "",
		   (flags & RADEON_UPLOAD_CLIPRECTS) ? "cliprects, " : "",
		   (flags & RADEON_REQUIRE_QUIESCENCE) ? "quiescence, " : "" );
}
487
/* Clear the front/back color buffers and/or the depth buffer for every
 * cliprect in the SAREA.  Color clears use 2D solid fills; the depth
 * clear renders a rectangle through the 3D engine using the
 * precomputed depth_clear register state and per-box vertex data
 * supplied by the client in depth_boxes.
 */
static void radeon_cp_dispatch_clear( drm_device_t *dev,
				      drm_radeon_clear_t *clear,
				      drm_radeon_clear_rect_t *depth_boxes )
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	unsigned int flags = clear->flags;
	int i;
	RING_LOCALS;
	DRM_DEBUG( "%s\n", __FUNCTION__ );

	/* When page-flipped, the logical front/back buffers are swapped,
	 * so swap the clear flags to hit the right physical buffer.
	 */
	if ( dev_priv->page_flipping && dev_priv->current_page == 1 ) {
		unsigned int tmp = flags;

		flags &= ~(RADEON_FRONT | RADEON_BACK);
		if ( tmp & RADEON_FRONT ) flags |= RADEON_BACK;
		if ( tmp & RADEON_BACK ) flags |= RADEON_FRONT;
	}

	for ( i = 0 ; i < nbox ; i++ ) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG( "dispatch clear %d,%d-%d,%d flags 0x%x\n",
			   x, y, w, h, flags );

		if ( flags & (RADEON_FRONT | RADEON_BACK) ) {
			BEGIN_RING( 4 );

			/* Ensure the 3D stream is idle before doing a
			 * 2D fill to clear the front or back buffer.
			 */
			RADEON_WAIT_UNTIL_3D_IDLE();

			/* Apply the client's color write mask for the fill. */
			OUT_RING( CP_PACKET0( RADEON_DP_WRITE_MASK, 0 ) );
			OUT_RING( clear->color_mask );

			ADVANCE_RING();

			/* Make sure we restore the 3D state next time.
			 */
			dev_priv->sarea_priv->dirty |= (RADEON_UPLOAD_CONTEXT |
							RADEON_UPLOAD_MASKS);
		}

		if ( flags & RADEON_FRONT ) {
			BEGIN_RING( 6 );

			OUT_RING( CP_PACKET3( RADEON_CNTL_PAINT_MULTI, 4 ) );
			OUT_RING( RADEON_GMC_DST_PITCH_OFFSET_CNTL |
				  RADEON_GMC_BRUSH_SOLID_COLOR |
				  (dev_priv->color_fmt << 8) |
				  RADEON_GMC_SRC_DATATYPE_COLOR |
				  RADEON_ROP3_P |
				  RADEON_GMC_CLR_CMP_CNTL_DIS );

			OUT_RING( dev_priv->front_pitch_offset );
			OUT_RING( clear->clear_color );

			OUT_RING( (x << 16) | y );
			OUT_RING( (w << 16) | h );

			ADVANCE_RING();
		}

		if ( flags & RADEON_BACK ) {
			BEGIN_RING( 6 );

			OUT_RING( CP_PACKET3( RADEON_CNTL_PAINT_MULTI, 4 ) );
			OUT_RING( RADEON_GMC_DST_PITCH_OFFSET_CNTL |
				  RADEON_GMC_BRUSH_SOLID_COLOR |
				  (dev_priv->color_fmt << 8) |
				  RADEON_GMC_SRC_DATATYPE_COLOR |
				  RADEON_ROP3_P |
				  RADEON_GMC_CLR_CMP_CNTL_DIS );

			OUT_RING( dev_priv->back_pitch_offset );
			OUT_RING( clear->clear_color );

			OUT_RING( (x << 16) | y );
			OUT_RING( (w << 16) | h );

			ADVANCE_RING();

		}

		if ( flags & RADEON_DEPTH ) {
			drm_radeon_depth_clear_t *depth_clear =
			   &dev_priv->depth_clear;

			/* The depth clear goes through the 3D pipe, so any
			 * pending state must be emitted first.
			 */
			if ( sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS ) {
				radeon_emit_state( dev_priv );
			}

			/* FIXME: Render a rectangle to clear the depth
			 * buffer.  So much for those "fast Z clears"...
			 */
			BEGIN_RING( 23 );

			RADEON_WAIT_UNTIL_2D_IDLE();

			/* Disable color writes and set up depth-only state. */
			OUT_RING( CP_PACKET0( RADEON_PP_CNTL, 1 ) );
			OUT_RING( 0x00000000 );
			OUT_RING( depth_clear->rb3d_cntl );
			OUT_RING( CP_PACKET0( RADEON_RB3D_ZSTENCILCNTL, 0 ) );
			OUT_RING( depth_clear->rb3d_zstencilcntl );
			OUT_RING( CP_PACKET0( RADEON_RB3D_PLANEMASK, 0 ) );
			OUT_RING( 0x00000000 );
			OUT_RING( CP_PACKET0( RADEON_SE_CNTL, 0 ) );
			OUT_RING( depth_clear->se_cntl );

			/* Draw one rectangle (3 vertices, z-only) covering
			 * this cliprect at the requested clear depth.
			 */
			OUT_RING( CP_PACKET3( RADEON_3D_DRAW_IMMD, 10 ) );
			OUT_RING( RADEON_VTX_Z_PRESENT );
			OUT_RING( (RADEON_PRIM_TYPE_RECT_LIST |
				   RADEON_PRIM_WALK_RING |
				   RADEON_MAOS_ENABLE |
				   RADEON_VTX_FMT_RADEON_MODE |
				   (3 << RADEON_NUM_VERTICES_SHIFT)) );

			OUT_RING( depth_boxes[i].ui[CLEAR_X1] );
			OUT_RING( depth_boxes[i].ui[CLEAR_Y1] );
			OUT_RING( depth_boxes[i].ui[CLEAR_DEPTH] );

			OUT_RING( depth_boxes[i].ui[CLEAR_X1] );
			OUT_RING( depth_boxes[i].ui[CLEAR_Y2] );
			OUT_RING( depth_boxes[i].ui[CLEAR_DEPTH] );

			OUT_RING( depth_boxes[i].ui[CLEAR_X2] );
			OUT_RING( depth_boxes[i].ui[CLEAR_Y2] );
			OUT_RING( depth_boxes[i].ui[CLEAR_DEPTH] );

			ADVANCE_RING();

			/* Make sure we restore the 3D state next time.
			 */
			dev_priv->sarea_priv->dirty |= (RADEON_UPLOAD_CONTEXT |
							RADEON_UPLOAD_SETUP |
							RADEON_UPLOAD_MASKS);
		}
	}

	/* Increment the clear counter.  The client-side 3D driver must
	 * wait on this value before performing the clear ioctl.  We
	 * need this because the card's so damned fast...
	 */
	dev_priv->sarea_priv->last_clear++;

	BEGIN_RING( 4 );

	RADEON_CLEAR_AGE( dev_priv->sarea_priv->last_clear );
	RADEON_WAIT_UNTIL_IDLE();

	ADVANCE_RING();
}
646
/* Copy-swap: blit the back buffer to the front buffer for every
 * cliprect, then bump the frame-age counter used for client-side
 * throttling.
 */
static void radeon_cp_dispatch_swap( drm_device_t *dev )
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int nbox = sarea_priv->nbox;
	drm_clip_rect_t *pbox = sarea_priv->boxes;
	int i;
	RING_LOCALS;
	DRM_DEBUG( "%s\n", __FUNCTION__ );

#if RADEON_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	radeon_cp_performance_boxes( dev_priv );
#endif

	/* Wait for the 3D stream to idle before dispatching the bitblt.
	 * This will prevent data corruption between the two streams.
	 */
	BEGIN_RING( 2 );

	RADEON_WAIT_UNTIL_3D_IDLE();

	ADVANCE_RING();

	for ( i = 0 ; i < nbox ; i++ ) {
		int x = pbox[i].x1;
		int y = pbox[i].y1;
		int w = pbox[i].x2 - x;
		int h = pbox[i].y2 - y;

		DRM_DEBUG( "dispatch swap %d,%d-%d,%d\n",
			   x, y, w, h );

		BEGIN_RING( 7 );

		OUT_RING( CP_PACKET3( RADEON_CNTL_BITBLT_MULTI, 5 ) );
		OUT_RING( RADEON_GMC_SRC_PITCH_OFFSET_CNTL |
			  RADEON_GMC_DST_PITCH_OFFSET_CNTL |
			  RADEON_GMC_BRUSH_NONE |
			  (dev_priv->color_fmt << 8) |
			  RADEON_GMC_SRC_DATATYPE_COLOR |
			  RADEON_ROP3_S |
			  RADEON_DP_SRC_SOURCE_MEMORY |
			  RADEON_GMC_CLR_CMP_CNTL_DIS |
			  RADEON_GMC_WR_MSK_DIS );

		/* Source (back) and destination (front) pitch/offset,
		 * then identical src/dst coordinates and the size.
		 */
		OUT_RING( dev_priv->back_pitch_offset );
		OUT_RING( dev_priv->front_pitch_offset );

		OUT_RING( (x << 16) | y );
		OUT_RING( (x << 16) | y );
		OUT_RING( (w << 16) | h );

		ADVANCE_RING();
	}

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING( 4 );

	RADEON_FRAME_AGE( dev_priv->sarea_priv->last_frame );
	RADEON_WAIT_UNTIL_2D_IDLE();

	ADVANCE_RING();
}
717
/* Page-flip swap: retarget the CRTC scanout base (CRTC_OFFSET) to the
 * other buffer and toggle current_page, then bump the frame-age
 * counter used for client-side throttling.
 */
static void radeon_cp_dispatch_flip( drm_device_t *dev )
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	RING_LOCALS;
	DRM_DEBUG( "%s: page=%d\n", __FUNCTION__, dev_priv->current_page );

#if RADEON_PERFORMANCE_BOXES
	/* Do some trivial performance monitoring...
	 */
	radeon_cp_performance_boxes( dev_priv );
#endif

	BEGIN_RING( 6 );

	/* Wait for rendering and any previous flip to finish before
	 * changing the scanout address.
	 */
	RADEON_WAIT_UNTIL_3D_IDLE();
	RADEON_WAIT_UNTIL_PAGE_FLIPPED();

	OUT_RING( CP_PACKET0( RADEON_CRTC_OFFSET, 0 ) );

	if ( dev_priv->current_page == 0 ) {
		OUT_RING( dev_priv->back_offset );
		dev_priv->current_page = 1;
	} else {
		OUT_RING( dev_priv->front_offset );
		dev_priv->current_page = 0;
	}

	ADVANCE_RING();

	/* Increment the frame counter.  The client-side 3D driver must
	 * throttle the framerate by waiting for this value before
	 * performing the swapbuffer ioctl.
	 */
	dev_priv->sarea_priv->last_frame++;

	BEGIN_RING( 2 );

	RADEON_FRAME_AGE( dev_priv->sarea_priv->last_frame );

	ADVANCE_RING();
}
759
/* Dispatch a client vertex buffer: emit pending state, then fire the
 * same rendering packet once per cliprect (each pass preceded by a
 * scissor update).  If the buffer is marked for discard, stamp it with
 * an age so it can be reclaimed once the CP has consumed it.
 */
static void radeon_cp_dispatch_vertex( drm_device_t *dev,
				       drm_buf_t *buf )
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_buf_priv_t *buf_priv = buf->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = dev_priv->agp_buffers_offset + buf->offset;
	int size = buf->used;
	int prim = buf_priv->prim;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG( "%s: nbox=%d\n", __FUNCTION__, sarea_priv->nbox );

	/* Debug dump of dirty flags; disabled. */
	if ( 0 )
		radeon_print_dirty( "dispatch_vertex", sarea_priv->dirty );

	if ( buf->used ) {
		buf_priv->dispatched = 1;

		if ( sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS ) {
			radeon_emit_state( dev_priv );
		}

		do {
			/* Emit the next set of up to three cliprects */
			if ( i < sarea_priv->nbox ) {
				radeon_emit_clip_rect( dev_priv,
						       &sarea_priv->boxes[i] );
			}

			/* Emit the vertex buffer rendering commands */
			BEGIN_RING( 5 );

			OUT_RING( CP_PACKET3( RADEON_3D_RNDR_GEN_INDX_PRIM, 3 ) );
			OUT_RING( offset );
			OUT_RING( size );
			OUT_RING( format );
			/* NOTE(review): `size` (bytes used) is placed in the
			 * vertex-count field here — looks like it is assumed
			 * to equal the vertex count; confirm against the
			 * client-side driver.
			 */
			OUT_RING( prim | RADEON_PRIM_WALK_LIST |
				  RADEON_COLOR_ORDER_RGBA |
				  RADEON_VTX_FMT_RADEON_MODE |
				  (size << RADEON_NUM_VERTICES_SHIFT) );

			ADVANCE_RING();

			i++;
		} while ( i < sarea_priv->nbox );
	}

	if ( buf_priv->discard ) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING( 2 );
		RADEON_DISPATCH_AGE( buf_priv->age );
		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~RADEON_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
828
829
/* Fire the [start, end) byte range of a client buffer as an indirect
 * CP buffer (CP_IB_BASE/CP_IB_BUFSZ).  Pads odd dword counts with a
 * Type-2 packet, and age-stamps the buffer if it is to be discarded.
 */
static void radeon_cp_dispatch_indirect( drm_device_t *dev,
					 drm_buf_t *buf,
					 int start, int end )
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_buf_priv_t *buf_priv = buf->dev_private;
	RING_LOCALS;
	DRM_DEBUG( "indirect: buf=%d s=0x%x e=0x%x\n",
		   buf->idx, start, end );

	if ( start != end ) {
		/* GPU address of the range within the AGP buffer area. */
		int offset = (dev_priv->agp_buffers_offset
			      + buf->offset + start);
		int dwords = (end - start + 3) / sizeof(u32);

		/* Indirect buffer data must be an even number of
		 * dwords, so if we've been given an odd number we must
		 * pad the data with a Type-2 CP packet.
		 */
		if ( dwords & 1 ) {
			u32 *data = (u32 *)
				((char *)dev_priv->buffers->handle
				 + buf->offset + start);
			data[dwords++] = RADEON_CP_PACKET2;
		}

		buf_priv->dispatched = 1;

		/* Fire off the indirect buffer */
		BEGIN_RING( 3 );

		OUT_RING( CP_PACKET0( RADEON_CP_IB_BASE, 1 ) );
		OUT_RING( offset );
		OUT_RING( dwords );

		ADVANCE_RING();
	}

	if ( buf_priv->discard ) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the indirect buffer age */
		BEGIN_RING( 2 );
		RADEON_DISPATCH_AGE( buf_priv->age );
		ADVANCE_RING();

		buf->pending = 1;
		buf->used = 0;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;
}
884
/* Dispatch an indexed primitive: patch a rendering packet header over
 * the index data in the client buffer, then fire that range as an
 * indirect buffer once per cliprect (scissor updated between passes).
 */
static void radeon_cp_dispatch_indices( drm_device_t *dev,
					drm_buf_t *buf,
					int start, int end,
					int count )
{
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_radeon_buf_priv_t *buf_priv = buf->dev_private;
	drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
	int format = sarea_priv->vc_format;
	int offset = dev_priv->agp_buffers_offset;
	int prim = buf_priv->prim;
	u32 *data;
	int dwords;
	int i = 0;
	RING_LOCALS;
	DRM_DEBUG( "indices: s=%d e=%d c=%d\n", start, end, count );

	/* Debug dump of dirty flags; disabled. */
	if ( 0 )
		radeon_print_dirty( "dispatch_indices", sarea_priv->dirty );

	if ( start != end ) {
		buf_priv->dispatched = 1;

		if ( sarea_priv->dirty & ~RADEON_UPLOAD_CLIPRECTS ) {
			radeon_emit_state( dev_priv );
		}

		dwords = (end - start + 3) / sizeof(u32);

		/* Write the packet header directly into the client
		 * buffer, in front of the index data.
		 */
		data = (u32 *)((char *)dev_priv->buffers->handle
			       + buf->offset + start);

		data[0] = CP_PACKET3( RADEON_3D_RNDR_GEN_INDX_PRIM, dwords-2 );

		data[1] = offset;
		data[2] = RADEON_MAX_VB_VERTS;
		data[3] = format;
		data[4] = (prim | RADEON_PRIM_WALK_IND |
			   RADEON_COLOR_ORDER_RGBA |
			   RADEON_VTX_FMT_RADEON_MODE |
			   (count << RADEON_NUM_VERTICES_SHIFT) );

		/* Indices are packed two per dword; for an odd count,
		 * mask off the unused upper half of the last dword.
		 */
		if ( count & 0x1 ) {
			data[dwords-1] &= 0x0000ffff;
		}

		do {
			/* Emit the next set of up to three cliprects */
			if ( i < sarea_priv->nbox ) {
				radeon_emit_clip_rect( dev_priv,
						       &sarea_priv->boxes[i] );
			}

			radeon_cp_dispatch_indirect( dev, buf, start, end );

			i++;
		} while ( i < sarea_priv->nbox );
	}

	if ( buf_priv->discard ) {
		buf_priv->age = dev_priv->sarea_priv->last_dispatch;

		/* Emit the vertex buffer age */
		BEGIN_RING( 2 );
		RADEON_DISPATCH_AGE( buf_priv->age );
		ADVANCE_RING();

		buf->pending = 1;
		/* FIXME: Check dispatched field */
		buf_priv->dispatched = 0;
	}

	dev_priv->sarea_priv->last_dispatch++;

	sarea_priv->dirty &= ~RADEON_UPLOAD_CLIPRECTS;
	sarea_priv->nbox = 0;
}
962
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +0000963#define RADEON_MAX_TEXTURE_SIZE (RADEON_BUFFER_SIZE - 8 * sizeof(u32))
964
965static int radeon_cp_dispatch_texture( drm_device_t *dev,
966 drm_radeon_texture_t *tex,
967 drm_radeon_tex_image_t *image )
Kevin E Martin0994e632001-01-05 22:57:55 +0000968{
969 drm_radeon_private_t *dev_priv = dev->dev_private;
Kevin E Martin0994e632001-01-05 22:57:55 +0000970 drm_buf_t *buf;
971 drm_radeon_buf_priv_t *buf_priv;
972 u32 format;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +0000973 u32 *buffer;
974 u8 *data;
975 int size, dwords, tex_width, blit_width;
976 u32 y, height;
977 int ret = 0, i;
Kevin E Martin0994e632001-01-05 22:57:55 +0000978 RING_LOCALS;
Kevin E Martin0994e632001-01-05 22:57:55 +0000979
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +0000980 /* FIXME: Be smarter about this...
981 */
982 buf = radeon_freelist_get( dev );
983 if ( !buf ) return -EAGAIN;
984
985 DRM_DEBUG( "tex: ofs=0x%x p=%d f=%d x=%hd y=%hd w=%hd h=%hd\n",
986 tex->offset >> 10, tex->pitch, tex->format,
987 image->x, image->y, image->width, image->height );
988
989 buf_priv = buf->dev_private;
Kevin E Martin0994e632001-01-05 22:57:55 +0000990
991 /* The compiler won't optimize away a division by a variable,
992 * even if the only legal values are powers of two. Thus, we'll
993 * use a shift instead.
994 */
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +0000995 switch ( tex->format ) {
996 case RADEON_TXFORMAT_ARGB8888:
997 case RADEON_TXFORMAT_RGBA8888:
Kevin E Martin0994e632001-01-05 22:57:55 +0000998 format = RADEON_COLOR_FORMAT_ARGB8888;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +0000999 tex_width = tex->width * 4;
1000 blit_width = image->width * 4;
Kevin E Martin0994e632001-01-05 22:57:55 +00001001 break;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001002 case RADEON_TXFORMAT_AI88:
1003 case RADEON_TXFORMAT_ARGB1555:
1004 case RADEON_TXFORMAT_RGB565:
1005 case RADEON_TXFORMAT_ARGB4444:
Kevin E Martin0994e632001-01-05 22:57:55 +00001006 format = RADEON_COLOR_FORMAT_RGB565;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001007 tex_width = tex->width * 2;
1008 blit_width = image->width * 2;
Kevin E Martin0994e632001-01-05 22:57:55 +00001009 break;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001010 case RADEON_TXFORMAT_I8:
1011 case RADEON_TXFORMAT_RGB332:
Kevin E Martin0994e632001-01-05 22:57:55 +00001012 format = RADEON_COLOR_FORMAT_CI8;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001013 tex_width = tex->width * 1;
1014 blit_width = image->width * 1;
Kevin E Martin0994e632001-01-05 22:57:55 +00001015 break;
1016 default:
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001017 DRM_ERROR( "invalid texture format %d\n", tex->format );
Kevin E Martin0994e632001-01-05 22:57:55 +00001018 return -EINVAL;
1019 }
1020
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001021 DRM_DEBUG( " tex=%dx%d blit=%d\n",
1022 tex_width, tex->height, blit_width );
1023
Kevin E Martin0994e632001-01-05 22:57:55 +00001024 /* Flush the pixel cache. This ensures no pixel data gets mixed
1025 * up with the texture data from the host data blit, otherwise
1026 * part of the texture image may be corrupted.
1027 */
1028 BEGIN_RING( 4 );
1029
1030 RADEON_FLUSH_CACHE();
1031 RADEON_WAIT_UNTIL_IDLE();
1032
1033 ADVANCE_RING();
1034
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001035 /* Make a copy of the parameters in case we have to update them
1036 * for a multi-pass texture blit.
1037 */
1038 y = image->y;
1039 height = image->height;
1040 data = (u8 *)image->data;
1041
1042 size = height * blit_width;
1043
1044 if ( size > RADEON_MAX_TEXTURE_SIZE ) {
1045 /* Texture image is too large, do a multipass upload */
1046 ret = -EAGAIN;
1047
1048 /* Adjust the blit size to fit the indirect buffer */
1049 height = RADEON_MAX_TEXTURE_SIZE / blit_width;
1050 size = height * blit_width;
1051
1052 /* Update the input parameters for next time */
1053 image->y += height;
1054 image->height -= height;
1055 image->data = (char *)image->data + size;
1056
1057 if ( copy_to_user( tex->image, image, sizeof(*image) ) ) {
1058 DRM_ERROR( "EFAULT on tex->image\n" );
1059 return -EFAULT;
1060 }
David Dawes44aa4d62002-01-27 20:05:42 +00001061 } else if ( size < 4 && size > 0 ) {
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001062 size = 4;
1063 }
1064
1065 dwords = size / 4;
1066
Kevin E Martin0994e632001-01-05 22:57:55 +00001067 /* Dispatch the indirect buffer.
1068 */
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001069 buffer = (u32 *)((char *)dev_priv->buffers->handle + buf->offset);
Kevin E Martin0994e632001-01-05 22:57:55 +00001070
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001071 buffer[0] = CP_PACKET3( RADEON_CNTL_HOSTDATA_BLT, dwords + 6 );
1072 buffer[1] = (RADEON_GMC_DST_PITCH_OFFSET_CNTL |
1073 RADEON_GMC_BRUSH_NONE |
1074 (format << 8) |
1075 RADEON_GMC_SRC_DATATYPE_COLOR |
1076 RADEON_ROP3_S |
1077 RADEON_DP_SRC_SOURCE_HOST_DATA |
1078 RADEON_GMC_CLR_CMP_CNTL_DIS |
1079 RADEON_GMC_WR_MSK_DIS);
1080
1081 buffer[2] = (tex->pitch << 22) | (tex->offset >> 10);
1082 buffer[3] = 0xffffffff;
1083 buffer[4] = 0xffffffff;
1084 buffer[5] = (y << 16) | image->x;
1085 buffer[6] = (height << 16) | image->width;
1086 buffer[7] = dwords;
1087
1088 buffer += 8;
1089
1090 if ( tex_width >= 32 ) {
1091 /* Texture image width is larger than the minimum, so we
1092 * can upload it directly.
1093 */
1094 if ( copy_from_user( buffer, data, dwords * sizeof(u32) ) ) {
1095 DRM_ERROR( "EFAULT on data, %d dwords\n", dwords );
1096 return -EFAULT;
1097 }
1098 } else {
1099 /* Texture image width is less than the minimum, so we
1100 * need to pad out each image scanline to the minimum
1101 * width.
1102 */
1103 for ( i = 0 ; i < tex->height ; i++ ) {
1104 if ( copy_from_user( buffer, data, tex_width ) ) {
1105 DRM_ERROR( "EFAULT on pad, %d bytes\n",
1106 tex_width );
1107 return -EFAULT;
1108 }
1109 buffer += 8;
1110 data += tex_width;
1111 }
Kevin E Martin0994e632001-01-05 22:57:55 +00001112 }
1113
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001114 buf->pid = current->pid;
David Dawes0e5b8d72001-03-19 17:45:52 +00001115 buf->used = (dwords + 8) * sizeof(u32);
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001116 buf_priv->discard = 1;
David Dawes0e5b8d72001-03-19 17:45:52 +00001117
Kevin E Martin0994e632001-01-05 22:57:55 +00001118 radeon_cp_dispatch_indirect( dev, buf, 0, buf->used );
1119
1120 /* Flush the pixel cache after the blit completes. This ensures
1121 * the texture data is written out to memory before rendering
1122 * continues.
1123 */
1124 BEGIN_RING( 4 );
1125
1126 RADEON_FLUSH_CACHE();
1127 RADEON_WAIT_UNTIL_2D_IDLE();
1128
1129 ADVANCE_RING();
1130
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001131 return ret;
Kevin E Martin0994e632001-01-05 22:57:55 +00001132}
1133
1134static void radeon_cp_dispatch_stipple( drm_device_t *dev, u32 *stipple )
1135{
1136 drm_radeon_private_t *dev_priv = dev->dev_private;
1137 int i;
1138 RING_LOCALS;
1139 DRM_DEBUG( "%s\n", __FUNCTION__ );
1140
Kevin E Martin0994e632001-01-05 22:57:55 +00001141 BEGIN_RING( 35 );
1142
1143 OUT_RING( CP_PACKET0( RADEON_RE_STIPPLE_ADDR, 0 ) );
1144 OUT_RING( 0x00000000 );
1145
1146 OUT_RING( CP_PACKET0_TABLE( RADEON_RE_STIPPLE_DATA, 31 ) );
1147 for ( i = 0 ; i < 32 ; i++ ) {
1148 OUT_RING( stipple[i] );
1149 }
1150
1151 ADVANCE_RING();
1152}
1153
1154
1155/* ================================================================
1156 * IOCTL functions
1157 */
1158
1159int radeon_cp_clear( struct inode *inode, struct file *filp,
1160 unsigned int cmd, unsigned long arg )
1161{
1162 drm_file_t *priv = filp->private_data;
1163 drm_device_t *dev = priv->dev;
1164 drm_radeon_private_t *dev_priv = dev->dev_private;
1165 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
1166 drm_radeon_clear_t clear;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001167 drm_radeon_clear_rect_t depth_boxes[RADEON_NR_SAREA_CLIPRECTS];
Kevin E Martin0994e632001-01-05 22:57:55 +00001168 DRM_DEBUG( "%s\n", __FUNCTION__ );
1169
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001170 LOCK_TEST_WITH_RETURN( dev );
Kevin E Martin0994e632001-01-05 22:57:55 +00001171
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001172 if ( copy_from_user( &clear, (drm_radeon_clear_t *)arg,
Kevin E Martin0994e632001-01-05 22:57:55 +00001173 sizeof(clear) ) )
1174 return -EFAULT;
1175
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001176 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1177
Kevin E Martin0994e632001-01-05 22:57:55 +00001178 if ( sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS )
1179 sarea_priv->nbox = RADEON_NR_SAREA_CLIPRECTS;
1180
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001181 if ( copy_from_user( &depth_boxes, clear.depth_boxes,
1182 sarea_priv->nbox * sizeof(depth_boxes[0]) ) )
1183 return -EFAULT;
1184
1185 radeon_cp_dispatch_clear( dev, &clear, depth_boxes );
Kevin E Martin0994e632001-01-05 22:57:55 +00001186
1187 return 0;
1188}
1189
1190int radeon_cp_swap( struct inode *inode, struct file *filp,
1191 unsigned int cmd, unsigned long arg )
1192{
1193 drm_file_t *priv = filp->private_data;
1194 drm_device_t *dev = priv->dev;
1195 drm_radeon_private_t *dev_priv = dev->dev_private;
1196 drm_radeon_sarea_t *sarea_priv = dev_priv->sarea_priv;
1197 DRM_DEBUG( "%s\n", __FUNCTION__ );
1198
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001199 LOCK_TEST_WITH_RETURN( dev );
1200
1201 RING_SPACE_TEST_WITH_RETURN( dev_priv );
Gareth Hughes4d2a4452001-01-24 15:34:46 +00001202
Kevin E Martin0994e632001-01-05 22:57:55 +00001203 if ( sarea_priv->nbox > RADEON_NR_SAREA_CLIPRECTS )
1204 sarea_priv->nbox = RADEON_NR_SAREA_CLIPRECTS;
1205
1206 if ( !dev_priv->page_flipping ) {
1207 radeon_cp_dispatch_swap( dev );
1208 dev_priv->sarea_priv->dirty |= (RADEON_UPLOAD_CONTEXT |
1209 RADEON_UPLOAD_MASKS);
1210 } else {
1211 radeon_cp_dispatch_flip( dev );
1212 }
1213
1214 return 0;
1215}
1216
1217int radeon_cp_vertex( struct inode *inode, struct file *filp,
1218 unsigned int cmd, unsigned long arg )
1219{
1220 drm_file_t *priv = filp->private_data;
1221 drm_device_t *dev = priv->dev;
1222 drm_radeon_private_t *dev_priv = dev->dev_private;
1223 drm_device_dma_t *dma = dev->dma;
1224 drm_buf_t *buf;
1225 drm_radeon_buf_priv_t *buf_priv;
1226 drm_radeon_vertex_t vertex;
1227
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001228 LOCK_TEST_WITH_RETURN( dev );
1229
1230 if ( !dev_priv ) {
1231 DRM_ERROR( "%s called with no initialization\n", __FUNCTION__ );
Kevin E Martin0994e632001-01-05 22:57:55 +00001232 return -EINVAL;
1233 }
1234
1235 if ( copy_from_user( &vertex, (drm_radeon_vertex_t *)arg,
1236 sizeof(vertex) ) )
1237 return -EFAULT;
1238
1239 DRM_DEBUG( "%s: pid=%d index=%d count=%d discard=%d\n",
1240 __FUNCTION__, current->pid,
1241 vertex.idx, vertex.count, vertex.discard );
1242
1243 if ( vertex.idx < 0 || vertex.idx >= dma->buf_count ) {
1244 DRM_ERROR( "buffer index %d (of %d max)\n",
1245 vertex.idx, dma->buf_count - 1 );
1246 return -EINVAL;
1247 }
1248 if ( vertex.prim < 0 ||
1249 vertex.prim > RADEON_PRIM_TYPE_3VRT_LINE_LIST ) {
1250 DRM_ERROR( "buffer prim %d\n", vertex.prim );
1251 return -EINVAL;
1252 }
1253
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001254 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1255 VB_AGE_TEST_WITH_RETURN( dev_priv );
Kevin E Martin0994e632001-01-05 22:57:55 +00001256
1257 buf = dma->buflist[vertex.idx];
1258 buf_priv = buf->dev_private;
1259
1260 if ( buf->pid != current->pid ) {
1261 DRM_ERROR( "process %d using buffer owned by %d\n",
1262 current->pid, buf->pid );
1263 return -EINVAL;
1264 }
1265 if ( buf->pending ) {
1266 DRM_ERROR( "sending pending buffer %d\n", vertex.idx );
1267 return -EINVAL;
1268 }
1269
1270 buf->used = vertex.count;
1271 buf_priv->prim = vertex.prim;
1272 buf_priv->discard = vertex.discard;
1273
1274 radeon_cp_dispatch_vertex( dev, buf );
1275
1276 return 0;
1277}
1278
/* Ioctl: validate and dispatch an indexed-primitive buffer through
 * the CP.  The validation sequence is order-sensitive: the index
 * count is computed from the raw start/end offsets BEFORE elts.start
 * is rebased past the prim header, and the alignment/header checks
 * apply to the rebased value.  Left byte-identical; comments only.
 */
int radeon_cp_indices( struct inode *inode, struct file *filp,
		       unsigned int cmd, unsigned long arg )
{
	drm_file_t *priv = filp->private_data;
	drm_device_t *dev = priv->dev;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_radeon_buf_priv_t *buf_priv;
	drm_radeon_indices_t elts;
	int count;

	LOCK_TEST_WITH_RETURN( dev );

	/* Reject ioctls issued before RADEON_CP_INIT. */
	if ( !dev_priv ) {
		DRM_ERROR( "%s called with no initialization\n", __FUNCTION__ );
		return -EINVAL;
	}

	if ( copy_from_user( &elts, (drm_radeon_indices_t *)arg,
			     sizeof(elts) ) )
		return -EFAULT;

	DRM_DEBUG( "%s: pid=%d index=%d start=%d end=%d discard=%d\n",
		   __FUNCTION__, current->pid,
		   elts.idx, elts.start, elts.end, elts.discard );

	/* Reject indices outside the DMA buffer list. */
	if ( elts.idx < 0 || elts.idx >= dma->buf_count ) {
		DRM_ERROR( "buffer index %d (of %d max)\n",
			   elts.idx, dma->buf_count - 1 );
		return -EINVAL;
	}
	/* Reject primitive types the engine does not accept. */
	if ( elts.prim < 0 ||
	     elts.prim > RADEON_PRIM_TYPE_3VRT_LINE_LIST ) {
		DRM_ERROR( "buffer prim %d\n", elts.prim );
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN( dev_priv );
	VB_AGE_TEST_WITH_RETURN( dev_priv );

	buf = dma->buflist[elts.idx];
	buf_priv = buf->dev_private;

	/* Only the owning process may submit, and never a buffer that
	 * is already queued to the hardware.
	 */
	if ( buf->pid != current->pid ) {
		DRM_ERROR( "process %d using buffer owned by %d\n",
			   current->pid, buf->pid );
		return -EINVAL;
	}
	if ( buf->pending ) {
		DRM_ERROR( "sending pending buffer %d\n", elts.idx );
		return -EINVAL;
	}

	/* Number of 16-bit indices between the raw offsets; then rebase
	 * start past the index-primitive header that precedes the data.
	 */
	count = (elts.end - elts.start) / sizeof(u16);
	elts.start -= RADEON_INDEX_PRIM_OFFSET;

	/* The rebased start must be 8-byte aligned... */
	if ( elts.start & 0x7 ) {
		DRM_ERROR( "misaligned buffer 0x%x\n", elts.start );
		return -EINVAL;
	}
	/* ...and must leave room before it for the prim header that
	 * the already-used portion of the buffer contains.
	 */
	if ( elts.start < buf->used ) {
		DRM_ERROR( "no header 0x%x - 0x%x\n", elts.start, buf->used );
		return -EINVAL;
	}

	buf->used = elts.end;
	buf_priv->prim = elts.prim;
	buf_priv->discard = elts.discard;

	radeon_cp_dispatch_indices( dev, buf, elts.start, elts.end, count );

	return 0;
}
1353
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001354int radeon_cp_texture( struct inode *inode, struct file *filp,
1355 unsigned int cmd, unsigned long arg )
Kevin E Martin0994e632001-01-05 22:57:55 +00001356{
1357 drm_file_t *priv = filp->private_data;
1358 drm_device_t *dev = priv->dev;
1359 drm_radeon_private_t *dev_priv = dev->dev_private;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001360 drm_radeon_texture_t tex;
1361 drm_radeon_tex_image_t image;
Kevin E Martin0994e632001-01-05 22:57:55 +00001362
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001363 LOCK_TEST_WITH_RETURN( dev );
Kevin E Martin0994e632001-01-05 22:57:55 +00001364
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001365 if ( copy_from_user( &tex, (drm_radeon_texture_t *)arg, sizeof(tex) ) )
Gareth Hughes3a74d3a2001-03-06 04:37:37 +00001366 return -EFAULT;
1367
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001368 if ( tex.image == NULL ) {
1369 DRM_ERROR( "null texture image!\n" );
David Dawes0e5b8d72001-03-19 17:45:52 +00001370 return -EINVAL;
1371 }
1372
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001373 if ( copy_from_user( &image,
1374 (drm_radeon_tex_image_t *)tex.image,
1375 sizeof(image) ) )
1376 return -EFAULT;
David Dawes0e5b8d72001-03-19 17:45:52 +00001377
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001378 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1379 VB_AGE_TEST_WITH_RETURN( dev_priv );
1380
1381 return radeon_cp_dispatch_texture( dev, &tex, &image );
Kevin E Martin0994e632001-01-05 22:57:55 +00001382}
1383
1384int radeon_cp_stipple( struct inode *inode, struct file *filp,
1385 unsigned int cmd, unsigned long arg )
1386{
1387 drm_file_t *priv = filp->private_data;
1388 drm_device_t *dev = priv->dev;
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001389 drm_radeon_private_t *dev_priv = dev->dev_private;
Kevin E Martin0994e632001-01-05 22:57:55 +00001390 drm_radeon_stipple_t stipple;
1391 u32 mask[32];
1392
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001393 LOCK_TEST_WITH_RETURN( dev );
Kevin E Martin0994e632001-01-05 22:57:55 +00001394
1395 if ( copy_from_user( &stipple, (drm_radeon_stipple_t *)arg,
1396 sizeof(stipple) ) )
1397 return -EFAULT;
1398
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001399 if ( copy_from_user( &mask, stipple.mask, 32 * sizeof(u32) ) )
Kevin E Martin0994e632001-01-05 22:57:55 +00001400 return -EFAULT;
1401
Kevin E Martin5d6ddbc2001-04-05 22:16:12 +00001402 RING_SPACE_TEST_WITH_RETURN( dev_priv );
1403
Kevin E Martin0994e632001-01-05 22:57:55 +00001404 radeon_cp_dispatch_stipple( dev, mask );
1405
1406 return 0;
1407}
1408
/* Ioctl: dispatch an indirect command buffer (e.g. 2D acceleration
 * commands from the X server) through the CP.  The buffer contents
 * are NOT verified, so this ioctl must remain restricted to
 * privileged clients.  Left byte-identical; comments only.
 */
int radeon_cp_indirect( struct inode *inode, struct file *filp,
			unsigned int cmd, unsigned long arg )
{
	drm_file_t *priv = filp->private_data;
	drm_device_t *dev = priv->dev;
	drm_radeon_private_t *dev_priv = dev->dev_private;
	drm_device_dma_t *dma = dev->dma;
	drm_buf_t *buf;
	drm_radeon_buf_priv_t *buf_priv;
	drm_radeon_indirect_t indirect;
	RING_LOCALS;

	LOCK_TEST_WITH_RETURN( dev );

	/* Reject ioctls issued before RADEON_CP_INIT. */
	if ( !dev_priv ) {
		DRM_ERROR( "%s called with no initialization\n", __FUNCTION__ );
		return -EINVAL;
	}

	if ( copy_from_user( &indirect, (drm_radeon_indirect_t *)arg,
			     sizeof(indirect) ) )
		return -EFAULT;

	DRM_DEBUG( "indirect: idx=%d s=%d e=%d d=%d\n",
		   indirect.idx, indirect.start,
		   indirect.end, indirect.discard );

	/* Reject indices outside the DMA buffer list. */
	if ( indirect.idx < 0 || indirect.idx >= dma->buf_count ) {
		DRM_ERROR( "buffer index %d (of %d max)\n",
			   indirect.idx, dma->buf_count - 1 );
		return -EINVAL;
	}

	buf = dma->buflist[indirect.idx];
	buf_priv = buf->dev_private;

	/* Only the owning process may submit, and never a buffer that
	 * is already queued to the hardware.
	 */
	if ( buf->pid != current->pid ) {
		DRM_ERROR( "process %d using buffer owned by %d\n",
			   current->pid, buf->pid );
		return -EINVAL;
	}
	if ( buf->pending ) {
		DRM_ERROR( "sending pending buffer %d\n", indirect.idx );
		return -EINVAL;
	}

	/* New commands must start at or after the already-consumed
	 * portion of the buffer; overlapping would resubmit old data.
	 */
	if ( indirect.start < buf->used ) {
		DRM_ERROR( "reusing indirect: start=0x%x actual=0x%x\n",
			   indirect.start, buf->used );
		return -EINVAL;
	}

	RING_SPACE_TEST_WITH_RETURN( dev_priv );
	VB_AGE_TEST_WITH_RETURN( dev_priv );

	buf->used = indirect.end;
	buf_priv->discard = indirect.discard;

	/* Wait for the 3D stream to idle before the indirect buffer
	 * containing 2D acceleration commands is processed.
	 */
	BEGIN_RING( 2 );

	RADEON_WAIT_UNTIL_3D_IDLE();

	ADVANCE_RING();

	/* Dispatch the indirect buffer full of commands from the
	 * X server. This is insecure and is thus only available to
	 * privileged clients.
	 */
	radeon_cp_dispatch_indirect( dev, buf, indirect.start, indirect.end );

	return 0;
}