 1/*
2 * Copyright 2010 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24#include <linux/firmware.h>
25#include <linux/platform_device.h>
 26#include <linux/slab.h>
 27#include "drmP.h"
 28#include "radeon.h"
 29#include "radeon_asic.h"
 30#include "radeon_drm.h"
 31#include "evergreend.h"
 32#include "atom.h"
33#include "avivod.h"
34#include "evergreen_reg.h"
 35#include "evergreen_blit_shaders.h"
 36
 37#define EVERGREEN_PFP_UCODE_SIZE 1120
38#define EVERGREEN_PM4_UCODE_SIZE 1376
39
 40static void evergreen_gpu_init(struct radeon_device *rdev);
41void evergreen_fini(struct radeon_device *rdev);
42
 43/* get temperature in millidegrees */
44u32 evergreen_get_temp(struct radeon_device *rdev)
45{
46 u32 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
47 ASIC_T_SHIFT;
48 u32 actual_temp = 0;
49
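	/* Decode sketch of the ASIC_T field, as handled below: bit 10 appears to
	 * flag an invalid reading (report 0), bit 9 saturates the value at 255
	 * degrees C, and bits 8:1 carry the temperature in degrees C.
	 */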
50 if ((temp >> 10) & 1)
51 actual_temp = 0;
52 else if ((temp >> 9) & 1)
53 actual_temp = 255;
54 else
55 actual_temp = (temp >> 1) & 0xff;
56
57 return actual_temp * 1000;
58}
59
 60void evergreen_pm_misc(struct radeon_device *rdev)
61{
 62 int req_ps_idx = rdev->pm.requested_power_state_index;
63 int req_cm_idx = rdev->pm.requested_clock_mode_index;
64 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
65 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
 66
 67 if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
68 if (voltage->voltage != rdev->pm.current_vddc) {
69 radeon_atom_set_voltage(rdev, voltage->voltage);
70 rdev->pm.current_vddc = voltage->voltage;
 71 DRM_DEBUG("Setting: v: %d\n", voltage->voltage);
 72 }
73 }
 74}
75
76void evergreen_pm_prepare(struct radeon_device *rdev)
77{
78 struct drm_device *ddev = rdev->ddev;
79 struct drm_crtc *crtc;
80 struct radeon_crtc *radeon_crtc;
81 u32 tmp;
82
 83 /* disable memory requests on any active CRTCs */
84 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
85 radeon_crtc = to_radeon_crtc(crtc);
86 if (radeon_crtc->enabled) {
87 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
88 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
89 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
90 }
91 }
92}
93
94void evergreen_pm_finish(struct radeon_device *rdev)
95{
96 struct drm_device *ddev = rdev->ddev;
97 struct drm_crtc *crtc;
98 struct radeon_crtc *radeon_crtc;
99 u32 tmp;
100
 101 /* re-enable memory requests on any active CRTCs */
102 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
103 radeon_crtc = to_radeon_crtc(crtc);
104 if (radeon_crtc->enabled) {
105 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
106 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
107 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
108 }
109 }
110}
111
 112bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
113{
114 bool connected = false;
 115
116 switch (hpd) {
117 case RADEON_HPD_1:
118 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
119 connected = true;
120 break;
121 case RADEON_HPD_2:
122 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
123 connected = true;
124 break;
125 case RADEON_HPD_3:
126 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
127 connected = true;
128 break;
129 case RADEON_HPD_4:
130 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
131 connected = true;
132 break;
133 case RADEON_HPD_5:
134 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
135 connected = true;
136 break;
137 case RADEON_HPD_6:
138 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
139 connected = true;
140 break;
141 default:
142 break;
143 }
144
 145 return connected;
146}
147
148void evergreen_hpd_set_polarity(struct radeon_device *rdev,
149 enum radeon_hpd_id hpd)
150{
 151 u32 tmp;
152 bool connected = evergreen_hpd_sense(rdev, hpd);
153
154 switch (hpd) {
155 case RADEON_HPD_1:
156 tmp = RREG32(DC_HPD1_INT_CONTROL);
157 if (connected)
158 tmp &= ~DC_HPDx_INT_POLARITY;
159 else
160 tmp |= DC_HPDx_INT_POLARITY;
161 WREG32(DC_HPD1_INT_CONTROL, tmp);
162 break;
163 case RADEON_HPD_2:
164 tmp = RREG32(DC_HPD2_INT_CONTROL);
165 if (connected)
166 tmp &= ~DC_HPDx_INT_POLARITY;
167 else
168 tmp |= DC_HPDx_INT_POLARITY;
169 WREG32(DC_HPD2_INT_CONTROL, tmp);
170 break;
171 case RADEON_HPD_3:
172 tmp = RREG32(DC_HPD3_INT_CONTROL);
173 if (connected)
174 tmp &= ~DC_HPDx_INT_POLARITY;
175 else
176 tmp |= DC_HPDx_INT_POLARITY;
177 WREG32(DC_HPD3_INT_CONTROL, tmp);
178 break;
179 case RADEON_HPD_4:
180 tmp = RREG32(DC_HPD4_INT_CONTROL);
181 if (connected)
182 tmp &= ~DC_HPDx_INT_POLARITY;
183 else
184 tmp |= DC_HPDx_INT_POLARITY;
185 WREG32(DC_HPD4_INT_CONTROL, tmp);
186 break;
187 case RADEON_HPD_5:
188 tmp = RREG32(DC_HPD5_INT_CONTROL);
189 if (connected)
190 tmp &= ~DC_HPDx_INT_POLARITY;
191 else
192 tmp |= DC_HPDx_INT_POLARITY;
193 WREG32(DC_HPD5_INT_CONTROL, tmp);
194 break;
195 case RADEON_HPD_6:
196 tmp = RREG32(DC_HPD6_INT_CONTROL);
197 if (connected)
198 tmp &= ~DC_HPDx_INT_POLARITY;
199 else
200 tmp |= DC_HPDx_INT_POLARITY;
201 WREG32(DC_HPD6_INT_CONTROL, tmp);
202 break;
203 default:
204 break;
205 }
 206}
207
208void evergreen_hpd_init(struct radeon_device *rdev)
209{
 210 struct drm_device *dev = rdev->ddev;
211 struct drm_connector *connector;
212 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
213 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
 214
 215 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
216 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
217 switch (radeon_connector->hpd.hpd) {
218 case RADEON_HPD_1:
219 WREG32(DC_HPD1_CONTROL, tmp);
220 rdev->irq.hpd[0] = true;
221 break;
222 case RADEON_HPD_2:
223 WREG32(DC_HPD2_CONTROL, tmp);
224 rdev->irq.hpd[1] = true;
225 break;
226 case RADEON_HPD_3:
227 WREG32(DC_HPD3_CONTROL, tmp);
228 rdev->irq.hpd[2] = true;
229 break;
230 case RADEON_HPD_4:
231 WREG32(DC_HPD4_CONTROL, tmp);
232 rdev->irq.hpd[3] = true;
233 break;
234 case RADEON_HPD_5:
235 WREG32(DC_HPD5_CONTROL, tmp);
236 rdev->irq.hpd[4] = true;
237 break;
238 case RADEON_HPD_6:
239 WREG32(DC_HPD6_CONTROL, tmp);
240 rdev->irq.hpd[5] = true;
241 break;
242 default:
243 break;
244 }
245 }
246 if (rdev->irq.installed)
247 evergreen_irq_set(rdev);
 248}
249
250void evergreen_hpd_fini(struct radeon_device *rdev)
251{
 252 struct drm_device *dev = rdev->ddev;
253 struct drm_connector *connector;
254
255 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
256 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
257 switch (radeon_connector->hpd.hpd) {
258 case RADEON_HPD_1:
259 WREG32(DC_HPD1_CONTROL, 0);
260 rdev->irq.hpd[0] = false;
261 break;
262 case RADEON_HPD_2:
263 WREG32(DC_HPD2_CONTROL, 0);
264 rdev->irq.hpd[1] = false;
265 break;
266 case RADEON_HPD_3:
267 WREG32(DC_HPD3_CONTROL, 0);
268 rdev->irq.hpd[2] = false;
269 break;
270 case RADEON_HPD_4:
271 WREG32(DC_HPD4_CONTROL, 0);
272 rdev->irq.hpd[3] = false;
273 break;
274 case RADEON_HPD_5:
275 WREG32(DC_HPD5_CONTROL, 0);
276 rdev->irq.hpd[4] = false;
277 break;
278 case RADEON_HPD_6:
279 WREG32(DC_HPD6_CONTROL, 0);
280 rdev->irq.hpd[5] = false;
281 break;
282 default:
283 break;
284 }
285 }
286}
287
 288/* watermark setup */
289
290static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
291 struct radeon_crtc *radeon_crtc,
292 struct drm_display_mode *mode,
293 struct drm_display_mode *other_mode)
294{
295 u32 tmp = 0;
296 /*
297 * Line Buffer Setup
298 * There are 3 line buffers, each one shared by 2 display controllers.
299 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
 300 * the display controllers. The partitioning is done via one of four
301 * preset allocations specified in bits 2:0:
302 * first display controller
303 * 0 - first half of lb (3840 * 2)
304 * 1 - first 3/4 of lb (5760 * 2)
305 * 2 - whole lb (7680 * 2)
306 * 3 - first 1/4 of lb (1920 * 2)
307 * second display controller
308 * 4 - second half of lb (3840 * 2)
309 * 5 - second 3/4 of lb (5760 * 2)
310 * 6 - whole lb (7680 * 2)
311 * 7 - last 1/4 of lb (1920 * 2)
312 */
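	/* Worked example of the selection below: a 3840-wide mode paired with a
	 * 1920-wide mode gets 3/4 of the buffer (its partner gets the remaining
	 * 1/4); two modes of equal width each get 1/2.
	 */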
313 if (mode && other_mode) {
314 if (mode->hdisplay > other_mode->hdisplay) {
315 if (mode->hdisplay > 2560)
316 tmp = 1; /* 3/4 */
317 else
318 tmp = 0; /* 1/2 */
319 } else if (other_mode->hdisplay > mode->hdisplay) {
320 if (other_mode->hdisplay > 2560)
321 tmp = 3; /* 1/4 */
322 else
323 tmp = 0; /* 1/2 */
324 } else
325 tmp = 0; /* 1/2 */
326 } else if (mode)
327 tmp = 2; /* whole */
328 else if (other_mode)
329 tmp = 3; /* 1/4 */
330
331 /* second controller of the pair uses second half of the lb */
332 if (radeon_crtc->crtc_id % 2)
333 tmp += 4;
334 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
335
336 switch (tmp) {
337 case 0:
338 case 4:
339 default:
340 return 3840 * 2;
341 case 1:
342 case 5:
343 return 5760 * 2;
344 case 2:
345 case 6:
346 return 7680 * 2;
347 case 3:
348 case 7:
349 return 1920 * 2;
350 }
351}
352
353static u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
354{
355 u32 tmp = RREG32(MC_SHARED_CHMAP);
356
357 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
358 case 0:
359 default:
360 return 1;
361 case 1:
362 return 2;
363 case 2:
364 return 4;
365 case 3:
366 return 8;
367 }
368}
369
370struct evergreen_wm_params {
371 u32 dram_channels; /* number of dram channels */
372 u32 yclk; /* bandwidth per dram data pin in kHz */
373 u32 sclk; /* engine clock in kHz */
374 u32 disp_clk; /* display clock in kHz */
375 u32 src_width; /* viewport width */
376 u32 active_time; /* active display time in ns */
377 u32 blank_time; /* blank time in ns */
378 bool interlaced; /* mode is interlaced */
379 fixed20_12 vsc; /* vertical scale ratio */
380 u32 num_heads; /* number of active crtcs */
381 u32 bytes_per_pixel; /* bytes per pixel display + overlay */
382 u32 lb_size; /* line buffer allocated to pipe */
383 u32 vtaps; /* vertical scaler taps */
384};
385
386static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
387{
388 /* Calculate DRAM Bandwidth and the part allocated to display. */
389 fixed20_12 dram_efficiency; /* 0.7 */
390 fixed20_12 yclk, dram_channels, bandwidth;
391 fixed20_12 a;
392
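	/* In effect (yclk is in kHz, 4 bytes per channel, 0.7 efficiency):
	 * bandwidth ~= (yclk / 1000) * dram_channels * 4 * 0.7  [MB/s]
	 */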
393 a.full = dfixed_const(1000);
394 yclk.full = dfixed_const(wm->yclk);
395 yclk.full = dfixed_div(yclk, a);
396 dram_channels.full = dfixed_const(wm->dram_channels * 4);
397 a.full = dfixed_const(10);
398 dram_efficiency.full = dfixed_const(7);
399 dram_efficiency.full = dfixed_div(dram_efficiency, a);
400 bandwidth.full = dfixed_mul(dram_channels, yclk);
401 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
402
403 return dfixed_trunc(bandwidth);
404}
405
406static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
407{
408 /* Calculate DRAM Bandwidth and the part allocated to display. */
409 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
410 fixed20_12 yclk, dram_channels, bandwidth;
411 fixed20_12 a;
412
413 a.full = dfixed_const(1000);
414 yclk.full = dfixed_const(wm->yclk);
415 yclk.full = dfixed_div(yclk, a);
416 dram_channels.full = dfixed_const(wm->dram_channels * 4);
417 a.full = dfixed_const(10);
 418 disp_dram_allocation.full = dfixed_const(3); /* XXX worst-case value 0.3 */
419 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
420 bandwidth.full = dfixed_mul(dram_channels, yclk);
421 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
422
423 return dfixed_trunc(bandwidth);
424}
425
426static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
427{
428 /* Calculate the display Data return Bandwidth */
429 fixed20_12 return_efficiency; /* 0.8 */
430 fixed20_12 sclk, bandwidth;
431 fixed20_12 a;
432
433 a.full = dfixed_const(1000);
434 sclk.full = dfixed_const(wm->sclk);
435 sclk.full = dfixed_div(sclk, a);
436 a.full = dfixed_const(10);
437 return_efficiency.full = dfixed_const(8);
438 return_efficiency.full = dfixed_div(return_efficiency, a);
439 a.full = dfixed_const(32);
440 bandwidth.full = dfixed_mul(a, sclk);
441 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
442
443 return dfixed_trunc(bandwidth);
444}
445
446static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
447{
448 /* Calculate the DMIF Request Bandwidth */
449 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
450 fixed20_12 disp_clk, bandwidth;
451 fixed20_12 a;
452
453 a.full = dfixed_const(1000);
454 disp_clk.full = dfixed_const(wm->disp_clk);
455 disp_clk.full = dfixed_div(disp_clk, a);
456 a.full = dfixed_const(10);
457 disp_clk_request_efficiency.full = dfixed_const(8);
458 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
459 a.full = dfixed_const(32);
460 bandwidth.full = dfixed_mul(a, disp_clk);
461 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
462
463 return dfixed_trunc(bandwidth);
464}
465
466static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
467{
 468 /* Calculate the Available bandwidth. Display can use this temporarily but not on average. */
469 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
470 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
471 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
472
473 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
474}
475
476static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
477{
478 /* Calculate the display mode Average Bandwidth
479 * DisplayMode should contain the source and destination dimensions,
480 * timing, etc.
481 */
482 fixed20_12 bpp;
483 fixed20_12 line_time;
484 fixed20_12 src_width;
485 fixed20_12 bandwidth;
486 fixed20_12 a;
487
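	/* In effect: bandwidth ~= src_width * bytes_per_pixel * vsc / line_time,
	 * with line_time = (active_time + blank_time) converted from ns to us,
	 * giving a result roughly in MB/s.
	 */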
488 a.full = dfixed_const(1000);
489 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
490 line_time.full = dfixed_div(line_time, a);
491 bpp.full = dfixed_const(wm->bytes_per_pixel);
492 src_width.full = dfixed_const(wm->src_width);
493 bandwidth.full = dfixed_mul(src_width, bpp);
494 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
495 bandwidth.full = dfixed_div(bandwidth, line_time);
496
497 return dfixed_trunc(bandwidth);
498}
499
500static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
501{
 502 /* First calculate the latency in ns */
503 u32 mc_latency = 2000; /* 2000 ns. */
504 u32 available_bandwidth = evergreen_available_bandwidth(wm);
505 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
506 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
507 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
508 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
509 (wm->num_heads * cursor_line_pair_return_time);
510 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
511 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
512 fixed20_12 a, b, c;
513
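	/* Roughly: latency = MC latency (2 us) + DC pipe latency + worst-case
	 * time for the other heads' chunk and cursor returns; if the line buffer
	 * cannot be refilled within the active display time, the shortfall is
	 * added on top.
	 */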
514 if (wm->num_heads == 0)
515 return 0;
516
517 a.full = dfixed_const(2);
518 b.full = dfixed_const(1);
519 if ((wm->vsc.full > a.full) ||
520 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
521 (wm->vtaps >= 5) ||
522 ((wm->vsc.full >= a.full) && wm->interlaced))
523 max_src_lines_per_dst_line = 4;
524 else
525 max_src_lines_per_dst_line = 2;
526
527 a.full = dfixed_const(available_bandwidth);
528 b.full = dfixed_const(wm->num_heads);
529 a.full = dfixed_div(a, b);
530
531 b.full = dfixed_const(1000);
532 c.full = dfixed_const(wm->disp_clk);
533 b.full = dfixed_div(c, b);
534 c.full = dfixed_const(wm->bytes_per_pixel);
535 b.full = dfixed_mul(b, c);
536
537 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
538
539 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
540 b.full = dfixed_const(1000);
541 c.full = dfixed_const(lb_fill_bw);
542 b.full = dfixed_div(c, b);
543 a.full = dfixed_div(a, b);
544 line_fill_time = dfixed_trunc(a);
545
546 if (line_fill_time < wm->active_time)
547 return latency;
548 else
549 return latency + (line_fill_time - wm->active_time);
550
551}
552
553static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
554{
555 if (evergreen_average_bandwidth(wm) <=
556 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
557 return true;
558 else
559 return false;
560};
561
562static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
563{
564 if (evergreen_average_bandwidth(wm) <=
565 (evergreen_available_bandwidth(wm) / wm->num_heads))
566 return true;
567 else
568 return false;
569};
570
571static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
572{
573 u32 lb_partitions = wm->lb_size / wm->src_width;
574 u32 line_time = wm->active_time + wm->blank_time;
575 u32 latency_tolerant_lines;
576 u32 latency_hiding;
577 fixed20_12 a;
578
579 a.full = dfixed_const(1);
580 if (wm->vsc.full > a.full)
581 latency_tolerant_lines = 1;
582 else {
583 if (lb_partitions <= (wm->vtaps + 1))
584 latency_tolerant_lines = 1;
585 else
586 latency_tolerant_lines = 2;
587 }
588
589 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
590
591 if (evergreen_latency_watermark(wm) <= latency_hiding)
592 return true;
593 else
594 return false;
595}
596
597static void evergreen_program_watermarks(struct radeon_device *rdev,
598 struct radeon_crtc *radeon_crtc,
599 u32 lb_size, u32 num_heads)
600{
601 struct drm_display_mode *mode = &radeon_crtc->base.mode;
602 struct evergreen_wm_params wm;
603 u32 pixel_period;
604 u32 line_time = 0;
605 u32 latency_watermark_a = 0, latency_watermark_b = 0;
606 u32 priority_a_mark = 0, priority_b_mark = 0;
607 u32 priority_a_cnt = PRIORITY_OFF;
608 u32 priority_b_cnt = PRIORITY_OFF;
609 u32 pipe_offset = radeon_crtc->crtc_id * 16;
610 u32 tmp, arb_control3;
611 fixed20_12 a, b, c;
612
613 if (radeon_crtc->base.enabled && num_heads && mode) {
614 pixel_period = 1000000 / (u32)mode->clock;
615 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
616 priority_a_cnt = 0;
617 priority_b_cnt = 0;
618
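		/* mode->clock is in kHz, so pixel_period above and the
		 * active/blank times below are in ns.
		 */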
619 wm.yclk = rdev->pm.current_mclk * 10;
620 wm.sclk = rdev->pm.current_sclk * 10;
621 wm.disp_clk = mode->clock;
622 wm.src_width = mode->crtc_hdisplay;
623 wm.active_time = mode->crtc_hdisplay * pixel_period;
624 wm.blank_time = line_time - wm.active_time;
625 wm.interlaced = false;
626 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
627 wm.interlaced = true;
628 wm.vsc = radeon_crtc->vsc;
629 wm.vtaps = 1;
630 if (radeon_crtc->rmx_type != RMX_OFF)
631 wm.vtaps = 2;
632 wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
633 wm.lb_size = lb_size;
634 wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
635 wm.num_heads = num_heads;
636
637 /* set for high clocks */
638 latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
639 /* set for low clocks */
640 /* wm.yclk = low clk; wm.sclk = low clk */
641 latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);
642
643 /* possibly force display priority to high */
644 /* should really do this at mode validation time... */
645 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
646 !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
647 !evergreen_check_latency_hiding(&wm) ||
648 (rdev->disp_priority == 2)) {
649 DRM_INFO("force priority to high\n");
650 priority_a_cnt |= PRIORITY_ALWAYS_ON;
651 priority_b_cnt |= PRIORITY_ALWAYS_ON;
652 }
653
654 a.full = dfixed_const(1000);
655 b.full = dfixed_const(mode->clock);
656 b.full = dfixed_div(b, a);
657 c.full = dfixed_const(latency_watermark_a);
658 c.full = dfixed_mul(c, b);
659 c.full = dfixed_mul(c, radeon_crtc->hsc);
660 c.full = dfixed_div(c, a);
661 a.full = dfixed_const(16);
662 c.full = dfixed_div(c, a);
663 priority_a_mark = dfixed_trunc(c);
664 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
665
666 a.full = dfixed_const(1000);
667 b.full = dfixed_const(mode->clock);
668 b.full = dfixed_div(b, a);
669 c.full = dfixed_const(latency_watermark_b);
670 c.full = dfixed_mul(c, b);
671 c.full = dfixed_mul(c, radeon_crtc->hsc);
672 c.full = dfixed_div(c, a);
673 a.full = dfixed_const(16);
674 c.full = dfixed_div(c, a);
675 priority_b_mark = dfixed_trunc(c);
676 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
677 }
678
679 /* select wm A */
680 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
681 tmp = arb_control3;
682 tmp &= ~LATENCY_WATERMARK_MASK(3);
683 tmp |= LATENCY_WATERMARK_MASK(1);
684 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
685 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
686 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
687 LATENCY_HIGH_WATERMARK(line_time)));
688 /* select wm B */
689 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
690 tmp &= ~LATENCY_WATERMARK_MASK(3);
691 tmp |= LATENCY_WATERMARK_MASK(2);
692 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
693 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
694 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
695 LATENCY_HIGH_WATERMARK(line_time)));
696 /* restore original selection */
697 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
698
699 /* write the priority marks */
700 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
701 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
702
703}
704
 705void evergreen_bandwidth_update(struct radeon_device *rdev)
706{
 707 struct drm_display_mode *mode0 = NULL;
708 struct drm_display_mode *mode1 = NULL;
709 u32 num_heads = 0, lb_size;
710 int i;
711
712 radeon_update_display_priority(rdev);
713
714 for (i = 0; i < rdev->num_crtc; i++) {
715 if (rdev->mode_info.crtcs[i]->base.enabled)
716 num_heads++;
717 }
718 for (i = 0; i < rdev->num_crtc; i += 2) {
719 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
720 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
721 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
722 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
723 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
724 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
725 }
 726}
727
728static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
729{
730 unsigned i;
731 u32 tmp;
732
733 for (i = 0; i < rdev->usec_timeout; i++) {
 734 /* read SRBM_STATUS and wait for the MC busy bits to clear */
735 tmp = RREG32(SRBM_STATUS) & 0x1F00;
736 if (!tmp)
737 return 0;
738 udelay(1);
739 }
740 return -1;
741}
742
743/*
744 * GART
745 */
 746void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
747{
748 unsigned i;
749 u32 tmp;
750
751 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
752 for (i = 0; i < rdev->usec_timeout; i++) {
 753 /* poll the VM context0 request/response status */
754 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
755 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
756 if (tmp == 2) {
757 printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
758 return;
759 }
760 if (tmp) {
761 return;
762 }
763 udelay(1);
764 }
765}
766
 767int evergreen_pcie_gart_enable(struct radeon_device *rdev)
768{
769 u32 tmp;
 770 int r;
 771
772 if (rdev->gart.table.vram.robj == NULL) {
773 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
774 return -EINVAL;
775 }
776 r = radeon_gart_table_vram_pin(rdev);
777 if (r)
778 return r;
 779 radeon_gart_restore(rdev);
 780 /* Setup L2 cache */
781 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
782 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
783 EFFECTIVE_L2_QUEUE_SIZE(7));
784 WREG32(VM_L2_CNTL2, 0);
785 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
786 /* Setup TLB control */
787 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
788 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
789 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
790 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
791 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
792 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
793 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
794 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
795 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
796 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
797 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
798 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
799 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
800 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
801 WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
802 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
803 WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
804 (u32)(rdev->dummy_page.addr >> 12));
 805 WREG32(VM_CONTEXT1_CNTL, 0);
 806
 807 evergreen_pcie_gart_tlb_flush(rdev);
 808 rdev->gart.ready = true;
809 return 0;
810}
811
812void evergreen_pcie_gart_disable(struct radeon_device *rdev)
813{
814 u32 tmp;
 815 int r;
 816
817 /* Disable all tables */
 818 WREG32(VM_CONTEXT0_CNTL, 0);
819 WREG32(VM_CONTEXT1_CNTL, 0);
 820
821 /* Setup L2 cache */
822 WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
823 EFFECTIVE_L2_QUEUE_SIZE(7));
824 WREG32(VM_L2_CNTL2, 0);
825 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
826 /* Setup TLB control */
827 tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
828 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
829 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
830 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
831 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
832 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
833 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
834 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
835 if (rdev->gart.table.vram.robj) {
836 r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
837 if (likely(r == 0)) {
838 radeon_bo_kunmap(rdev->gart.table.vram.robj);
839 radeon_bo_unpin(rdev->gart.table.vram.robj);
840 radeon_bo_unreserve(rdev->gart.table.vram.robj);
841 }
842 }
843}
844
845void evergreen_pcie_gart_fini(struct radeon_device *rdev)
846{
847 evergreen_pcie_gart_disable(rdev);
848 radeon_gart_table_vram_free(rdev);
849 radeon_gart_fini(rdev);
850}
851
852
853void evergreen_agp_enable(struct radeon_device *rdev)
854{
855 u32 tmp;
 856
857 /* Setup L2 cache */
858 WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
859 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
860 EFFECTIVE_L2_QUEUE_SIZE(7));
861 WREG32(VM_L2_CNTL2, 0);
862 WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
863 /* Setup TLB control */
864 tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
865 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
866 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
867 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
868 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
869 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
870 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
871 WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
872 WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
873 WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
874 WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
 875 WREG32(VM_CONTEXT0_CNTL, 0);
876 WREG32(VM_CONTEXT1_CNTL, 0);
 877}
878
879static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
880{
881 save->vga_control[0] = RREG32(D1VGA_CONTROL);
882 save->vga_control[1] = RREG32(D2VGA_CONTROL);
883 save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
884 save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
885 save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
886 save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
887 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
888 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
889 save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
890 save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
891 save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
892 save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
893 save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
894 save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
895
896 /* Stop all video */
897 WREG32(VGA_RENDER_CONTROL, 0);
898 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
899 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
900 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
901 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
902 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
903 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
904 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
905 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
906 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
907 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
908 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
909 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
910 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
911 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
912 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
913 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
914 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
915 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
916
917 WREG32(D1VGA_CONTROL, 0);
918 WREG32(D2VGA_CONTROL, 0);
919 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
920 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
921 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
922 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
923}
924
925static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
926{
927 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
928 upper_32_bits(rdev->mc.vram_start));
929 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
930 upper_32_bits(rdev->mc.vram_start));
931 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
932 (u32)rdev->mc.vram_start);
933 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
934 (u32)rdev->mc.vram_start);
935
936 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
937 upper_32_bits(rdev->mc.vram_start));
938 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
939 upper_32_bits(rdev->mc.vram_start));
940 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
941 (u32)rdev->mc.vram_start);
942 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
943 (u32)rdev->mc.vram_start);
944
945 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
946 upper_32_bits(rdev->mc.vram_start));
947 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
948 upper_32_bits(rdev->mc.vram_start));
949 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
950 (u32)rdev->mc.vram_start);
951 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
952 (u32)rdev->mc.vram_start);
953
954 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
955 upper_32_bits(rdev->mc.vram_start));
956 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
957 upper_32_bits(rdev->mc.vram_start));
958 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
959 (u32)rdev->mc.vram_start);
960 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
961 (u32)rdev->mc.vram_start);
962
963 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
964 upper_32_bits(rdev->mc.vram_start));
965 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
966 upper_32_bits(rdev->mc.vram_start));
967 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
968 (u32)rdev->mc.vram_start);
969 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
970 (u32)rdev->mc.vram_start);
971
972 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
973 upper_32_bits(rdev->mc.vram_start));
974 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
975 upper_32_bits(rdev->mc.vram_start));
976 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
977 (u32)rdev->mc.vram_start);
978 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
979 (u32)rdev->mc.vram_start);
980
981 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
982 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
983 /* Unlock host access */
984 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
985 mdelay(1);
986 /* Restore video state */
987 WREG32(D1VGA_CONTROL, save->vga_control[0]);
988 WREG32(D2VGA_CONTROL, save->vga_control[1]);
989 WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
990 WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
991 WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
992 WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
993 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
994 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
995 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
996 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
997 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
998 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
999 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
1000 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
1001 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
1002 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
1003 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
1004 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
1005 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
1006 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
1007 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
1008 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
1009 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
1010 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
1011 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
1012}
1013
1014static void evergreen_mc_program(struct radeon_device *rdev)
1015{
1016 struct evergreen_mc_save save;
1017 u32 tmp;
1018 int i, j;
1019
1020 /* Initialize HDP */
1021 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
1022 WREG32((0x2c14 + j), 0x00000000);
1023 WREG32((0x2c18 + j), 0x00000000);
1024 WREG32((0x2c1c + j), 0x00000000);
1025 WREG32((0x2c20 + j), 0x00000000);
1026 WREG32((0x2c24 + j), 0x00000000);
1027 }
1028 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
1029
1030 evergreen_mc_stop(rdev, &save);
1031 if (evergreen_mc_wait_for_idle(rdev)) {
 1032 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
 1033 }
 1034 /* Lock out access through the VGA aperture */
1035 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
1036 /* Update configuration */
1037 if (rdev->flags & RADEON_IS_AGP) {
1038 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
1039 /* VRAM before AGP */
1040 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1041 rdev->mc.vram_start >> 12);
1042 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1043 rdev->mc.gtt_end >> 12);
1044 } else {
1045 /* VRAM after AGP */
1046 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1047 rdev->mc.gtt_start >> 12);
1048 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1049 rdev->mc.vram_end >> 12);
1050 }
1051 } else {
1052 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
1053 rdev->mc.vram_start >> 12);
1054 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
1055 rdev->mc.vram_end >> 12);
1056 }
1057 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
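	/* MC_VM_FB_LOCATION packs the FB top (bits 31:16) and base (bits 15:0)
	 * in 16 MB units.
	 */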
1058 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
1059 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
1060 WREG32(MC_VM_FB_LOCATION, tmp);
1061 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
1062 WREG32(HDP_NONSURFACE_INFO, (2 << 7));
 1063 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
 1064 if (rdev->flags & RADEON_IS_AGP) {
1065 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
1066 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
1067 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
1068 } else {
1069 WREG32(MC_VM_AGP_BASE, 0);
1070 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
1071 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
1072 }
1073 if (evergreen_mc_wait_for_idle(rdev)) {
 1074 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1075 }
1076 evergreen_mc_resume(rdev, &save);
1077 /* we need to own VRAM, so turn off the VGA renderer here
1078 * to stop it overwriting our objects */
1079 rv515_vga_render_disable(rdev);
1080}
1081
 1082/*
1083 * CP.
1084 */
 1085
1086static int evergreen_cp_load_microcode(struct radeon_device *rdev)
1087{
 1088 const __be32 *fw_data;
1089 int i;
 1090
 1091 if (!rdev->me_fw || !rdev->pfp_fw)
1092 return -EINVAL;
1093
1094 r700_cp_stop(rdev);
1095 WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));
1096
1097 fw_data = (const __be32 *)rdev->pfp_fw->data;
1098 WREG32(CP_PFP_UCODE_ADDR, 0);
1099 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
1100 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
1101 WREG32(CP_PFP_UCODE_ADDR, 0);
1102
1103 fw_data = (const __be32 *)rdev->me_fw->data;
1104 WREG32(CP_ME_RAM_WADDR, 0);
1105 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
1106 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
1107
1108 WREG32(CP_PFP_UCODE_ADDR, 0);
1109 WREG32(CP_ME_RAM_WADDR, 0);
1110 WREG32(CP_ME_RAM_RADDR, 0);
 1111 return 0;
1112}
1113
 1114static int evergreen_cp_start(struct radeon_device *rdev)
1115{
 1116 int r, i;
 1117 uint32_t cp_me;
1118
1119 r = radeon_ring_lock(rdev, 7);
1120 if (r) {
1121 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
1122 return r;
1123 }
1124 radeon_ring_write(rdev, PACKET3(PACKET3_ME_INITIALIZE, 5));
1125 radeon_ring_write(rdev, 0x1);
1126 radeon_ring_write(rdev, 0x0);
1127 radeon_ring_write(rdev, rdev->config.evergreen.max_hw_contexts - 1);
1128 radeon_ring_write(rdev, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
1129 radeon_ring_write(rdev, 0);
1130 radeon_ring_write(rdev, 0);
1131 radeon_ring_unlock_commit(rdev);
1132
1133 cp_me = 0xff;
1134 WREG32(CP_ME_CNTL, cp_me);
1135
 1136 r = radeon_ring_lock(rdev, evergreen_default_size + 15);
 1137 if (r) {
1138 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
1139 return r;
1140 }
 1141
1142 /* setup clear context state */
1143 radeon_ring_write(rdev, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
1144 radeon_ring_write(rdev, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
1145
1146 for (i = 0; i < evergreen_default_size; i++)
1147 radeon_ring_write(rdev, evergreen_default_state[i]);
1148
1149 radeon_ring_write(rdev, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
1150 radeon_ring_write(rdev, PACKET3_PREAMBLE_END_CLEAR_STATE);
1151
1152 /* set clear context state */
1153 radeon_ring_write(rdev, PACKET3(PACKET3_CLEAR_STATE, 0));
1154 radeon_ring_write(rdev, 0);
1155
1156 /* SQ_VTX_BASE_VTX_LOC */
1157 radeon_ring_write(rdev, 0xc0026f00);
1158 radeon_ring_write(rdev, 0x00000000);
1159 radeon_ring_write(rdev, 0x00000000);
1160 radeon_ring_write(rdev, 0x00000000);
1161
1162 /* Clear consts */
1163 radeon_ring_write(rdev, 0xc0036f00);
1164 radeon_ring_write(rdev, 0x00000bc4);
1165 radeon_ring_write(rdev, 0xffffffff);
1166 radeon_ring_write(rdev, 0xffffffff);
1167 radeon_ring_write(rdev, 0xffffffff);
1168
 1169 radeon_ring_unlock_commit(rdev);
1170
1171 return 0;
1172}
1173
 1174int evergreen_cp_resume(struct radeon_device *rdev)
1175{
1176 u32 tmp;
1177 u32 rb_bufsz;
1178 int r;
1179
1180 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
1181 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
1182 SOFT_RESET_PA |
1183 SOFT_RESET_SH |
1184 SOFT_RESET_VGT |
1185 SOFT_RESET_SX));
1186 RREG32(GRBM_SOFT_RESET);
1187 mdelay(15);
1188 WREG32(GRBM_SOFT_RESET, 0);
1189 RREG32(GRBM_SOFT_RESET);
1190
1191 /* Set ring buffer size */
1192 rb_bufsz = drm_order(rdev->cp.ring_size / 8);
 1193 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
 1194#ifdef __BIG_ENDIAN
 1195 tmp |= BUF_SWAP_32BIT;
 1196#endif
 1197 WREG32(CP_RB_CNTL, tmp);
1198 WREG32(CP_SEM_WAIT_TIMER, 0x4);
1199
1200 /* Set the write pointer delay */
1201 WREG32(CP_RB_WPTR_DELAY, 0);
1202
1203 /* Initialize the ring buffer's read and write pointers */
1204 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
1205 WREG32(CP_RB_RPTR_WR, 0);
1206 WREG32(CP_RB_WPTR, 0);
 1207
 1208 /* set the wb address whether it's enabled or not */
1209 WREG32(CP_RB_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
1210 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
1211 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
1212
1213 if (rdev->wb.enabled)
1214 WREG32(SCRATCH_UMSK, 0xff);
1215 else {
1216 tmp |= RB_NO_UPDATE;
1217 WREG32(SCRATCH_UMSK, 0);
1218 }
1219
 1220 mdelay(1);
1221 WREG32(CP_RB_CNTL, tmp);
1222
1223 WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
1224 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
1225
1226 rdev->cp.rptr = RREG32(CP_RB_RPTR);
1227 rdev->cp.wptr = RREG32(CP_RB_WPTR);
1228
 1229 evergreen_cp_start(rdev);
 1230 rdev->cp.ready = true;
1231 r = radeon_ring_test(rdev);
1232 if (r) {
1233 rdev->cp.ready = false;
1234 return r;
1235 }
1236 return 0;
1237}
 1238
1239/*
1240 * Core functions
1241 */
 1242static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
 1243 u32 num_tile_pipes,
 1244 u32 num_backends,
1245 u32 backend_disable_mask)
1246{
1247 u32 backend_map = 0;
 1248 u32 enabled_backends_mask = 0;
1249 u32 enabled_backends_count = 0;
1250 u32 cur_pipe;
1251 u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
1252 u32 cur_backend = 0;
1253 u32 i;
1254 bool force_no_swizzle;
1255
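	/* The map built below packs one nibble per tile pipe: each nibble gives
	 * the render backend that services that pipe, assigned round-robin over
	 * the enabled backends in the order given by the swizzle tables below.
	 */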
1256 if (num_tile_pipes > EVERGREEN_MAX_PIPES)
1257 num_tile_pipes = EVERGREEN_MAX_PIPES;
1258 if (num_tile_pipes < 1)
1259 num_tile_pipes = 1;
1260 if (num_backends > EVERGREEN_MAX_BACKENDS)
1261 num_backends = EVERGREEN_MAX_BACKENDS;
1262 if (num_backends < 1)
1263 num_backends = 1;
1264
1265 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
1266 if (((backend_disable_mask >> i) & 1) == 0) {
1267 enabled_backends_mask |= (1 << i);
1268 ++enabled_backends_count;
1269 }
1270 if (enabled_backends_count == num_backends)
1271 break;
1272 }
1273
1274 if (enabled_backends_count == 0) {
1275 enabled_backends_mask = 1;
1276 enabled_backends_count = 1;
1277 }
1278
1279 if (enabled_backends_count != num_backends)
1280 num_backends = enabled_backends_count;
1281
1282 memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
1283 switch (rdev->family) {
1284 case CHIP_CEDAR:
1285 case CHIP_REDWOOD:
1286 force_no_swizzle = false;
1287 break;
1288 case CHIP_CYPRESS:
1289 case CHIP_HEMLOCK:
1290 case CHIP_JUNIPER:
1291 default:
1292 force_no_swizzle = true;
1293 break;
1294 }
1295 if (force_no_swizzle) {
1296 bool last_backend_enabled = false;
1297
1298 force_no_swizzle = false;
1299 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
1300 if (((enabled_backends_mask >> i) & 1) == 1) {
1301 if (last_backend_enabled)
1302 force_no_swizzle = true;
1303 last_backend_enabled = true;
1304 } else
1305 last_backend_enabled = false;
1306 }
1307 }
1308
1309 switch (num_tile_pipes) {
1310 case 1:
1311 case 3:
1312 case 5:
1313 case 7:
1314 DRM_ERROR("odd number of pipes!\n");
1315 break;
1316 case 2:
1317 swizzle_pipe[0] = 0;
1318 swizzle_pipe[1] = 1;
1319 break;
1320 case 4:
1321 if (force_no_swizzle) {
1322 swizzle_pipe[0] = 0;
1323 swizzle_pipe[1] = 1;
1324 swizzle_pipe[2] = 2;
1325 swizzle_pipe[3] = 3;
1326 } else {
1327 swizzle_pipe[0] = 0;
1328 swizzle_pipe[1] = 2;
1329 swizzle_pipe[2] = 1;
1330 swizzle_pipe[3] = 3;
1331 }
1332 break;
1333 case 6:
1334 if (force_no_swizzle) {
1335 swizzle_pipe[0] = 0;
1336 swizzle_pipe[1] = 1;
1337 swizzle_pipe[2] = 2;
1338 swizzle_pipe[3] = 3;
1339 swizzle_pipe[4] = 4;
1340 swizzle_pipe[5] = 5;
1341 } else {
1342 swizzle_pipe[0] = 0;
1343 swizzle_pipe[1] = 2;
1344 swizzle_pipe[2] = 4;
1345 swizzle_pipe[3] = 1;
1346 swizzle_pipe[4] = 3;
1347 swizzle_pipe[5] = 5;
1348 }
1349 break;
1350 case 8:
1351 if (force_no_swizzle) {
1352 swizzle_pipe[0] = 0;
1353 swizzle_pipe[1] = 1;
1354 swizzle_pipe[2] = 2;
1355 swizzle_pipe[3] = 3;
1356 swizzle_pipe[4] = 4;
1357 swizzle_pipe[5] = 5;
1358 swizzle_pipe[6] = 6;
1359 swizzle_pipe[7] = 7;
1360 } else {
1361 swizzle_pipe[0] = 0;
1362 swizzle_pipe[1] = 2;
1363 swizzle_pipe[2] = 4;
1364 swizzle_pipe[3] = 6;
1365 swizzle_pipe[4] = 1;
1366 swizzle_pipe[5] = 3;
1367 swizzle_pipe[6] = 5;
1368 swizzle_pipe[7] = 7;
1369 }
1370 break;
1371 }
1372
1373 for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
1374 while (((1 << cur_backend) & enabled_backends_mask) == 0)
1375 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
1376
1377 backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));
1378
1379 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
1380 }
 1381
1382 return backend_map;
1383}
 1384
 1385static void evergreen_program_channel_remap(struct radeon_device *rdev)
1386{
1387 u32 tcp_chan_steer_lo, tcp_chan_steer_hi, mc_shared_chremap, tmp;
1388
1389 tmp = RREG32(MC_SHARED_CHMAP);
1390 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1391 case 0:
1392 case 1:
1393 case 2:
1394 case 3:
1395 default:
1396 /* default mapping */
1397 mc_shared_chremap = 0x00fac688;
1398 break;
1399 }
1400
1401 switch (rdev->family) {
1402 case CHIP_HEMLOCK:
1403 case CHIP_CYPRESS:
1404 tcp_chan_steer_lo = 0x54763210;
1405 tcp_chan_steer_hi = 0x0000ba98;
1406 break;
1407 case CHIP_JUNIPER:
1408 case CHIP_REDWOOD:
1409 case CHIP_CEDAR:
1410 default:
1411 tcp_chan_steer_lo = 0x76543210;
1412 tcp_chan_steer_hi = 0x0000ba98;
1413 break;
1414 }
1415
1416 WREG32(TCP_CHAN_STEER_LO, tcp_chan_steer_lo);
1417 WREG32(TCP_CHAN_STEER_HI, tcp_chan_steer_hi);
1418 WREG32(MC_SHARED_CHREMAP, mc_shared_chremap);
1419}
1420
 1421static void evergreen_gpu_init(struct radeon_device *rdev)
1422{
 1423 u32 cc_rb_backend_disable = 0;
1424 u32 cc_gc_shader_pipe_config;
1425 u32 gb_addr_config = 0;
1426 u32 mc_shared_chmap, mc_arb_ramcfg;
1427 u32 gb_backend_map;
1428 u32 grbm_gfx_index;
1429 u32 sx_debug_1;
1430 u32 smx_dc_ctl0;
1431 u32 sq_config;
1432 u32 sq_lds_resource_mgmt;
1433 u32 sq_gpr_resource_mgmt_1;
1434 u32 sq_gpr_resource_mgmt_2;
1435 u32 sq_gpr_resource_mgmt_3;
1436 u32 sq_thread_resource_mgmt;
1437 u32 sq_thread_resource_mgmt_2;
1438 u32 sq_stack_resource_mgmt_1;
1439 u32 sq_stack_resource_mgmt_2;
1440 u32 sq_stack_resource_mgmt_3;
1441 u32 vgt_cache_invalidation;
1442 u32 hdp_host_path_cntl;
1443 int i, j, num_shader_engines, ps_thread_count;
1444
1445 switch (rdev->family) {
1446 case CHIP_CYPRESS:
1447 case CHIP_HEMLOCK:
1448 rdev->config.evergreen.num_ses = 2;
1449 rdev->config.evergreen.max_pipes = 4;
1450 rdev->config.evergreen.max_tile_pipes = 8;
1451 rdev->config.evergreen.max_simds = 10;
1452 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1453 rdev->config.evergreen.max_gprs = 256;
1454 rdev->config.evergreen.max_threads = 248;
1455 rdev->config.evergreen.max_gs_threads = 32;
1456 rdev->config.evergreen.max_stack_entries = 512;
1457 rdev->config.evergreen.sx_num_of_sets = 4;
1458 rdev->config.evergreen.sx_max_export_size = 256;
1459 rdev->config.evergreen.sx_max_export_pos_size = 64;
1460 rdev->config.evergreen.sx_max_export_smx_size = 192;
1461 rdev->config.evergreen.max_hw_contexts = 8;
1462 rdev->config.evergreen.sq_num_cf_insts = 2;
1463
1464 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1465 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1466 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1467 break;
1468 case CHIP_JUNIPER:
1469 rdev->config.evergreen.num_ses = 1;
1470 rdev->config.evergreen.max_pipes = 4;
1471 rdev->config.evergreen.max_tile_pipes = 4;
1472 rdev->config.evergreen.max_simds = 10;
1473 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1474 rdev->config.evergreen.max_gprs = 256;
1475 rdev->config.evergreen.max_threads = 248;
1476 rdev->config.evergreen.max_gs_threads = 32;
1477 rdev->config.evergreen.max_stack_entries = 512;
1478 rdev->config.evergreen.sx_num_of_sets = 4;
1479 rdev->config.evergreen.sx_max_export_size = 256;
1480 rdev->config.evergreen.sx_max_export_pos_size = 64;
1481 rdev->config.evergreen.sx_max_export_smx_size = 192;
1482 rdev->config.evergreen.max_hw_contexts = 8;
1483 rdev->config.evergreen.sq_num_cf_insts = 2;
1484
1485 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1486 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1487 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1488 break;
1489 case CHIP_REDWOOD:
1490 rdev->config.evergreen.num_ses = 1;
1491 rdev->config.evergreen.max_pipes = 4;
1492 rdev->config.evergreen.max_tile_pipes = 4;
1493 rdev->config.evergreen.max_simds = 5;
1494 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1495 rdev->config.evergreen.max_gprs = 256;
1496 rdev->config.evergreen.max_threads = 248;
1497 rdev->config.evergreen.max_gs_threads = 32;
1498 rdev->config.evergreen.max_stack_entries = 256;
1499 rdev->config.evergreen.sx_num_of_sets = 4;
1500 rdev->config.evergreen.sx_max_export_size = 256;
1501 rdev->config.evergreen.sx_max_export_pos_size = 64;
1502 rdev->config.evergreen.sx_max_export_smx_size = 192;
1503 rdev->config.evergreen.max_hw_contexts = 8;
1504 rdev->config.evergreen.sq_num_cf_insts = 2;
1505
1506 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1507 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1508 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1509 break;
1510 case CHIP_CEDAR:
1511 default:
1512 rdev->config.evergreen.num_ses = 1;
1513 rdev->config.evergreen.max_pipes = 2;
1514 rdev->config.evergreen.max_tile_pipes = 2;
1515 rdev->config.evergreen.max_simds = 2;
1516 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1517 rdev->config.evergreen.max_gprs = 256;
1518 rdev->config.evergreen.max_threads = 192;
1519 rdev->config.evergreen.max_gs_threads = 16;
1520 rdev->config.evergreen.max_stack_entries = 256;
1521 rdev->config.evergreen.sx_num_of_sets = 4;
1522 rdev->config.evergreen.sx_max_export_size = 128;
1523 rdev->config.evergreen.sx_max_export_pos_size = 32;
1524 rdev->config.evergreen.sx_max_export_smx_size = 96;
1525 rdev->config.evergreen.max_hw_contexts = 4;
1526 rdev->config.evergreen.sq_num_cf_insts = 1;
1527
1528 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1529 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1530 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1531 break;
1532 }
1533
1534 /* Initialize HDP */
1535 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
1536 WREG32((0x2c14 + j), 0x00000000);
1537 WREG32((0x2c18 + j), 0x00000000);
1538 WREG32((0x2c1c + j), 0x00000000);
1539 WREG32((0x2c20 + j), 0x00000000);
1540 WREG32((0x2c24 + j), 0x00000000);
1541 }
1542
1543 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
1544
1545 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
1546
1547 cc_gc_shader_pipe_config |=
1548 INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
1549 & EVERGREEN_MAX_PIPES_MASK);
1550 cc_gc_shader_pipe_config |=
1551 INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
1552 & EVERGREEN_MAX_SIMDS_MASK);
1553
1554 cc_rb_backend_disable =
1555 BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
1556 & EVERGREEN_MAX_BACKENDS_MASK);
1557
1558
1559 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
1560 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
1561
1562 switch (rdev->config.evergreen.max_tile_pipes) {
1563 case 1:
1564 default:
1565 gb_addr_config |= NUM_PIPES(0);
1566 break;
1567 case 2:
1568 gb_addr_config |= NUM_PIPES(1);
1569 break;
1570 case 4:
1571 gb_addr_config |= NUM_PIPES(2);
1572 break;
1573 case 8:
1574 gb_addr_config |= NUM_PIPES(3);
1575 break;
1576 }
1577
1578 gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
1579 gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
1580 gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
1581 gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
1582 gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
1583 gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
1584
1585 if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
1586 gb_addr_config |= ROW_SIZE(2);
1587 else
1588 gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
1589
1590 if (rdev->ddev->pdev->device == 0x689e) {
1591 u32 efuse_straps_4;
1592 u32 efuse_straps_3;
1593 u8 efuse_box_bit_131_124;
1594
1595 WREG32(RCU_IND_INDEX, 0x204);
1596 efuse_straps_4 = RREG32(RCU_IND_DATA);
1597 WREG32(RCU_IND_INDEX, 0x203);
1598 efuse_straps_3 = RREG32(RCU_IND_DATA);
1599 efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
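		/* The eight efuse bits 131:124 straddle two strap words: bits 131:128
		 * come from the low nibble of strap word 4 (index 0x204) and bits
		 * 127:124 from the top nibble of strap word 3 (index 0x203), hence
		 * the shift/or above.
		 */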
1600
1601 switch(efuse_box_bit_131_124) {
1602 case 0x00:
1603 gb_backend_map = 0x76543210;
1604 break;
1605 case 0x55:
1606 gb_backend_map = 0x77553311;
1607 break;
1608 case 0x56:
1609 gb_backend_map = 0x77553300;
1610 break;
1611 case 0x59:
1612 gb_backend_map = 0x77552211;
1613 break;
1614 case 0x66:
1615 gb_backend_map = 0x77443300;
1616 break;
1617 case 0x99:
1618 gb_backend_map = 0x66552211;
1619 break;
1620 case 0x5a:
1621 gb_backend_map = 0x77552200;
1622 break;
1623 case 0xaa:
1624 gb_backend_map = 0x66442200;
1625 break;
1626 case 0x95:
1627 gb_backend_map = 0x66553311;
1628 break;
1629 default:
1630 DRM_ERROR("bad backend map, using default\n");
1631 gb_backend_map =
1632 evergreen_get_tile_pipe_to_backend_map(rdev,
1633 rdev->config.evergreen.max_tile_pipes,
1634 rdev->config.evergreen.max_backends,
1635 ((EVERGREEN_MAX_BACKENDS_MASK <<
1636 rdev->config.evergreen.max_backends) &
1637 EVERGREEN_MAX_BACKENDS_MASK));
1638 break;
1639 }
1640 } else if (rdev->ddev->pdev->device == 0x68b9) {
1641 u32 efuse_straps_3;
1642 u8 efuse_box_bit_127_124;
1643
1644 WREG32(RCU_IND_INDEX, 0x203);
1645 efuse_straps_3 = RREG32(RCU_IND_DATA);
Alex Deucherd31dba52010-10-11 12:41:32 -04001646 efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001647
1648 switch(efuse_box_bit_127_124) {
1649 case 0x0:
1650 gb_backend_map = 0x00003210;
1651 break;
1652 case 0x5:
1653 case 0x6:
1654 case 0x9:
1655 case 0xa:
1656 gb_backend_map = 0x00003311;
1657 break;
1658 default:
1659 DRM_ERROR("bad backend map, using default\n");
1660 gb_backend_map =
1661 evergreen_get_tile_pipe_to_backend_map(rdev,
1662 rdev->config.evergreen.max_tile_pipes,
1663 rdev->config.evergreen.max_backends,
1664 ((EVERGREEN_MAX_BACKENDS_MASK <<
1665 rdev->config.evergreen.max_backends) &
1666 EVERGREEN_MAX_BACKENDS_MASK));
1667 break;
1668 }
Alex Deucherb741be82010-09-09 19:15:23 -04001669 } else {
1670 switch (rdev->family) {
1671 case CHIP_CYPRESS:
1672 case CHIP_HEMLOCK:
1673 gb_backend_map = 0x66442200;
1674 break;
1675 case CHIP_JUNIPER:
1676 gb_backend_map = 0x00006420;
1677 break;
1678 default:
1679 gb_backend_map =
1680 evergreen_get_tile_pipe_to_backend_map(rdev,
1681 rdev->config.evergreen.max_tile_pipes,
1682 rdev->config.evergreen.max_backends,
1683 ((EVERGREEN_MAX_BACKENDS_MASK <<
1684 rdev->config.evergreen.max_backends) &
1685 EVERGREEN_MAX_BACKENDS_MASK));
1686 }
1687 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001688
Alex Deucher1aa52bd2010-11-17 12:11:03 -05001689 /* setup tiling info dword. gb_addr_config is not adequate since it does
1690 * not have bank info, so create a custom tiling dword.
1691 * bits 3:0 num_pipes
1692 * bits 7:4 num_banks
1693 * bits 11:8 group_size
1694 * bits 15:12 row_size
1695 */
1696 rdev->config.evergreen.tile_config = 0;
1697 switch (rdev->config.evergreen.max_tile_pipes) {
1698 case 1:
1699 default:
1700 rdev->config.evergreen.tile_config |= (0 << 0);
1701 break;
1702 case 2:
1703 rdev->config.evergreen.tile_config |= (1 << 0);
1704 break;
1705 case 4:
1706 rdev->config.evergreen.tile_config |= (2 << 0);
1707 break;
1708 case 8:
1709 rdev->config.evergreen.tile_config |= (3 << 0);
1710 break;
1711 }
1712 rdev->config.evergreen.tile_config |=
1713 ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) << 4;
1714 rdev->config.evergreen.tile_config |=
1715 ((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT) << 8;
1716 rdev->config.evergreen.tile_config |=
1717 ((gb_addr_config & 0x30000000) >> 28) << 12;
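	/* Illustrative example (hypothetical values, not read from hardware):
	 * with 4 tile pipes (encoded as 2), a NOOFBANK field of 1, a BURSTLENGTH
	 * field of 1 and row-size bits of 2, the dword above works out to
	 * 2 | (1 << 4) | (1 << 8) | (2 << 12) = 0x2112.
	 */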
1718
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001719 WREG32(GB_BACKEND_MAP, gb_backend_map);
1720 WREG32(GB_ADDR_CONFIG, gb_addr_config);
1721 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
1722 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
1723
Alex Deucher9535ab72010-11-22 17:56:18 -05001724 evergreen_program_channel_remap(rdev);
1725
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001726 num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
1727 grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
1728
1729 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
1730 u32 rb = cc_rb_backend_disable | (0xf0 << 16);
1731 u32 sp = cc_gc_shader_pipe_config;
1732 u32 gfx = grbm_gfx_index | SE_INDEX(i);
1733
1734 if (i == num_shader_engines) {
1735 rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
1736 sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
1737 }
1738
1739 WREG32(GRBM_GFX_INDEX, gfx);
1740 WREG32(RLC_GFX_INDEX, gfx);
1741
1742 WREG32(CC_RB_BACKEND_DISABLE, rb);
1743 WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
1744 WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
1745 WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
1746 }
1747
1748 grbm_gfx_index |= SE_BROADCAST_WRITES;
1749 WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
1750 WREG32(RLC_GFX_INDEX, grbm_gfx_index);
1751
1752 WREG32(CGTS_SYS_TCC_DISABLE, 0);
1753 WREG32(CGTS_TCC_DISABLE, 0);
1754 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
1755 WREG32(CGTS_USER_TCC_DISABLE, 0);
1756
1757 /* set HW defaults for 3D engine */
1758 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
1759 ROQ_IB2_START(0x2b)));
1760
1761 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
1762
1763 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
1764 SYNC_GRADIENT |
1765 SYNC_WALKER |
1766 SYNC_ALIGNER));
1767
1768 sx_debug_1 = RREG32(SX_DEBUG_1);
1769 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
1770 WREG32(SX_DEBUG_1, sx_debug_1);
1771
1772
1773 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
1774 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
1775 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
1776 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
1777
1778 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
1779 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
1780 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
1781
1782 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
1783 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
1784 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
1785
1786 WREG32(VGT_NUM_INSTANCES, 1);
1787 WREG32(SPI_CONFIG_CNTL, 0);
1788 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
1789 WREG32(CP_PERFMON_CNTL, 0);
1790
1791 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
1792 FETCH_FIFO_HIWATER(0x4) |
1793 DONE_FIFO_HIWATER(0xe0) |
1794 ALU_UPDATE_FIFO_HIWATER(0x8)));
1795
1796 sq_config = RREG32(SQ_CONFIG);
1797 sq_config &= ~(PS_PRIO(3) |
1798 VS_PRIO(3) |
1799 GS_PRIO(3) |
1800 ES_PRIO(3));
1801 sq_config |= (VC_ENABLE |
1802 EXPORT_SRC_C |
1803 PS_PRIO(0) |
1804 VS_PRIO(1) |
1805 GS_PRIO(2) |
1806 ES_PRIO(3));
1807
1808 if (rdev->family == CHIP_CEDAR)
1809 /* no vertex cache */
1810 sq_config &= ~VC_ENABLE;
1811
1812 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
1813
1814 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
1815 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
1816 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
1817 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1818 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1819 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1820 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1821
1822 if (rdev->family == CHIP_CEDAR)
1823 ps_thread_count = 96;
1824 else
1825 ps_thread_count = 128;
1826
1827 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
Alex Deucherf96b35c2010-06-16 12:24:07 -04001828 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1829 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1830 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1831 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1832 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
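	/* A sketch of the arithmetic, assuming Cypress-class limits: with
	 * max_threads = 248 and ps_thread_count = 128, each remaining stage gets
	 * (((248 - 128) / 6) / 8) * 8 = 16 threads; dividing and then multiplying
	 * by 8 keeps each count a multiple of 8.
	 */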
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001833
1834 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1835 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1836 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1837 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1838 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1839 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
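	/* The stack entries are simply split evenly across the six stages, e.g.
	 * a hypothetical max_stack_entries of 512 gives 512 / 6 = 85 entries per
	 * stage (integer division, the remainder is left unused).
	 */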
1840
1841 WREG32(SQ_CONFIG, sq_config);
1842 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
1843 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
1844 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
1845 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
1846 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
1847 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
1848 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
1849 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
1850 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
1851 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
1852
1853 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
1854 FORCE_EOV_MAX_REZ_CNT(255)));
1855
1856 if (rdev->family == CHIP_CEDAR)
1857 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
1858 else
1859 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
1860 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
1861 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
1862
1863 WREG32(VGT_GS_VERTEX_REUSE, 16);
1864 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
1865
Alex Deucher60a4a3e2010-06-29 17:03:35 -04001866 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
1867 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
1868
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001869 WREG32(CB_PERF_CTR0_SEL_0, 0);
1870 WREG32(CB_PERF_CTR0_SEL_1, 0);
1871 WREG32(CB_PERF_CTR1_SEL_0, 0);
1872 WREG32(CB_PERF_CTR1_SEL_1, 0);
1873 WREG32(CB_PERF_CTR2_SEL_0, 0);
1874 WREG32(CB_PERF_CTR2_SEL_1, 0);
1875 WREG32(CB_PERF_CTR3_SEL_0, 0);
1876 WREG32(CB_PERF_CTR3_SEL_1, 0);
1877
Alex Deucher60a4a3e2010-06-29 17:03:35 -04001878 /* clear render buffer base addresses */
1879 WREG32(CB_COLOR0_BASE, 0);
1880 WREG32(CB_COLOR1_BASE, 0);
1881 WREG32(CB_COLOR2_BASE, 0);
1882 WREG32(CB_COLOR3_BASE, 0);
1883 WREG32(CB_COLOR4_BASE, 0);
1884 WREG32(CB_COLOR5_BASE, 0);
1885 WREG32(CB_COLOR6_BASE, 0);
1886 WREG32(CB_COLOR7_BASE, 0);
1887 WREG32(CB_COLOR8_BASE, 0);
1888 WREG32(CB_COLOR9_BASE, 0);
1889 WREG32(CB_COLOR10_BASE, 0);
1890 WREG32(CB_COLOR11_BASE, 0);
1891
1892 /* set the shader const cache sizes to 0 */
1893 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
1894 WREG32(i, 0);
1895 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
1896 WREG32(i, 0);
1897
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001898 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
1899 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
1900
1901 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
1902
1903 udelay(50);
1904
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001905}
1906
1907int evergreen_mc_init(struct radeon_device *rdev)
1908{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001909 u32 tmp;
1910 int chansize, numchan;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001911
1912	/* Get VRAM information */
1913 rdev->mc.vram_is_ddr = true;
1914 tmp = RREG32(MC_ARB_RAMCFG);
1915 if (tmp & CHANSIZE_OVERRIDE) {
1916 chansize = 16;
1917 } else if (tmp & CHANSIZE_MASK) {
1918 chansize = 64;
1919 } else {
1920 chansize = 32;
1921 }
1922 tmp = RREG32(MC_SHARED_CHMAP);
1923 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1924 case 0:
1925 default:
1926 numchan = 1;
1927 break;
1928 case 1:
1929 numchan = 2;
1930 break;
1931 case 2:
1932 numchan = 4;
1933 break;
1934 case 3:
1935 numchan = 8;
1936 break;
1937 }
1938 rdev->mc.vram_width = numchan * chansize;
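	/* e.g. a board reporting 4 channels of 32-bit DRAM ends up with an
	 * effective VRAM width of 128 bits here (illustrative values only).
	 */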
1939	/* Could the aperture size report 0? */
Jordan Crouse01d73a62010-05-27 13:40:24 -06001940 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
1941 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001942 /* Setup GPU memory space */
1943 /* size in MB on evergreen */
1944 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1945 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
Jerome Glisse51e5fcd2010-02-19 14:33:54 +00001946 rdev->mc.visible_vram_size = rdev->mc.aper_size;
Jerome Glissec919b372010-08-10 17:41:31 -04001947 rdev->mc.active_vram_size = rdev->mc.visible_vram_size;
Alex Deucher0ef0c1f2010-11-22 17:56:26 -05001948 r700_vram_gtt_location(rdev, &rdev->mc);
Alex Deucherf47299c2010-03-16 20:54:38 -04001949 radeon_update_bandwidth_info(rdev);
1950
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001951 return 0;
1952}
Jerome Glissed594e462010-02-17 21:54:29 +00001953
Jerome Glisse225758d2010-03-09 14:45:10 +00001954bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
1955{
1956 /* FIXME: implement for evergreen */
1957 return false;
1958}
1959
Alex Deucher747943e2010-03-24 13:26:36 -04001960static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
1961{
1962 struct evergreen_mc_save save;
1963 u32 srbm_reset = 0;
1964 u32 grbm_reset = 0;
1965
1966	dev_info(rdev->dev, "GPU softreset\n");
1967 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
1968 RREG32(GRBM_STATUS));
1969 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
1970 RREG32(GRBM_STATUS_SE0));
1971 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
1972 RREG32(GRBM_STATUS_SE1));
1973 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
1974 RREG32(SRBM_STATUS));
1975 evergreen_mc_stop(rdev, &save);
1976 if (evergreen_mc_wait_for_idle(rdev)) {
1977		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
1978 }
1979 /* Disable CP parsing/prefetching */
1980 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
1981
1982 /* reset all the gfx blocks */
1983 grbm_reset = (SOFT_RESET_CP |
1984 SOFT_RESET_CB |
1985 SOFT_RESET_DB |
1986 SOFT_RESET_PA |
1987 SOFT_RESET_SC |
1988 SOFT_RESET_SPI |
1989 SOFT_RESET_SH |
1990 SOFT_RESET_SX |
1991 SOFT_RESET_TC |
1992 SOFT_RESET_TA |
1993 SOFT_RESET_VC |
1994 SOFT_RESET_VGT);
1995
1996 dev_info(rdev->dev, " GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
1997 WREG32(GRBM_SOFT_RESET, grbm_reset);
1998 (void)RREG32(GRBM_SOFT_RESET);
1999 udelay(50);
2000 WREG32(GRBM_SOFT_RESET, 0);
2001 (void)RREG32(GRBM_SOFT_RESET);
2002
2003 /* reset all the system blocks */
2004 srbm_reset = SRBM_SOFT_RESET_ALL_MASK;
2005
2006 dev_info(rdev->dev, " SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
2007 WREG32(SRBM_SOFT_RESET, srbm_reset);
2008 (void)RREG32(SRBM_SOFT_RESET);
2009 udelay(50);
2010 WREG32(SRBM_SOFT_RESET, 0);
2011 (void)RREG32(SRBM_SOFT_RESET);
2012 /* Wait a little for things to settle down */
2013 udelay(50);
2014 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
2015 RREG32(GRBM_STATUS));
2016 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
2017 RREG32(GRBM_STATUS_SE0));
2018 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
2019 RREG32(GRBM_STATUS_SE1));
2020 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
2021 RREG32(SRBM_STATUS));
2022	/* After a reset we need to reinit the asic as the GPU often ends up
2023	 * in an incoherent state.
2024 */
2025 atom_asic_init(rdev->mode_info.atom_context);
2026 evergreen_mc_resume(rdev, &save);
2027 return 0;
2028}
2029
Jerome Glissea2d07b72010-03-09 14:45:11 +00002030int evergreen_asic_reset(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002031{
Alex Deucher747943e2010-03-24 13:26:36 -04002032 return evergreen_gpu_soft_reset(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002033}
2034
Alex Deucher45f9a392010-03-24 13:55:51 -04002035/* Interrupts */
2036
2037u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
2038{
2039 switch (crtc) {
2040 case 0:
2041 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
2042 case 1:
2043 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
2044 case 2:
2045 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
2046 case 3:
2047 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
2048 case 4:
2049 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
2050 case 5:
2051 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
2052 default:
2053 return 0;
2054 }
2055}
2056
2057void evergreen_disable_interrupt_state(struct radeon_device *rdev)
2058{
2059 u32 tmp;
2060
Alex Deucher3555e532010-10-08 12:09:12 -04002061 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
Alex Deucher45f9a392010-03-24 13:55:51 -04002062 WREG32(GRBM_INT_CNTL, 0);
2063 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
2064 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2065 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
2066 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2067 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2068 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2069
2070 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
2071 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2072 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
2073 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2074 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2075 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2076
2077 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
2078 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
2079
2080 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2081 WREG32(DC_HPD1_INT_CONTROL, tmp);
2082 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2083 WREG32(DC_HPD2_INT_CONTROL, tmp);
2084 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2085 WREG32(DC_HPD3_INT_CONTROL, tmp);
2086 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2087 WREG32(DC_HPD4_INT_CONTROL, tmp);
2088 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2089 WREG32(DC_HPD5_INT_CONTROL, tmp);
2090 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2091 WREG32(DC_HPD6_INT_CONTROL, tmp);
2092
2093}
2094
2095int evergreen_irq_set(struct radeon_device *rdev)
2096{
2097 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2098 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
2099 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
Alex Deucher2031f772010-04-22 12:52:11 -04002100 u32 grbm_int_cntl = 0;
Alex Deucher45f9a392010-03-24 13:55:51 -04002101
2102 if (!rdev->irq.installed) {
Joe Perchesfce7d612010-10-30 21:08:30 +00002103 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
Alex Deucher45f9a392010-03-24 13:55:51 -04002104 return -EINVAL;
2105 }
2106 /* don't enable anything if the ih is disabled */
2107 if (!rdev->ih.enabled) {
2108 r600_disable_interrupts(rdev);
2109 /* force the active interrupt state to all disabled */
2110 evergreen_disable_interrupt_state(rdev);
2111 return 0;
2112 }
2113
2114 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
2115 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
2116 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
2117 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
2118 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
2119 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
2120
2121 if (rdev->irq.sw_int) {
2122 DRM_DEBUG("evergreen_irq_set: sw int\n");
2123 cp_int_cntl |= RB_INT_ENABLE;
Alex Deucherd0f8a852010-09-04 05:04:34 -04002124 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
Alex Deucher45f9a392010-03-24 13:55:51 -04002125 }
2126 if (rdev->irq.crtc_vblank_int[0]) {
2127 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
2128 crtc1 |= VBLANK_INT_MASK;
2129 }
2130 if (rdev->irq.crtc_vblank_int[1]) {
2131 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
2132 crtc2 |= VBLANK_INT_MASK;
2133 }
2134 if (rdev->irq.crtc_vblank_int[2]) {
2135 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
2136 crtc3 |= VBLANK_INT_MASK;
2137 }
2138 if (rdev->irq.crtc_vblank_int[3]) {
2139 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
2140 crtc4 |= VBLANK_INT_MASK;
2141 }
2142 if (rdev->irq.crtc_vblank_int[4]) {
2143 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
2144 crtc5 |= VBLANK_INT_MASK;
2145 }
2146 if (rdev->irq.crtc_vblank_int[5]) {
2147 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
2148 crtc6 |= VBLANK_INT_MASK;
2149 }
2150 if (rdev->irq.hpd[0]) {
2151 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
2152 hpd1 |= DC_HPDx_INT_EN;
2153 }
2154 if (rdev->irq.hpd[1]) {
2155 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
2156 hpd2 |= DC_HPDx_INT_EN;
2157 }
2158 if (rdev->irq.hpd[2]) {
2159 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
2160 hpd3 |= DC_HPDx_INT_EN;
2161 }
2162 if (rdev->irq.hpd[3]) {
2163 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
2164 hpd4 |= DC_HPDx_INT_EN;
2165 }
2166 if (rdev->irq.hpd[4]) {
2167 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
2168 hpd5 |= DC_HPDx_INT_EN;
2169 }
2170 if (rdev->irq.hpd[5]) {
2171 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
2172 hpd6 |= DC_HPDx_INT_EN;
2173 }
Alex Deucher2031f772010-04-22 12:52:11 -04002174 if (rdev->irq.gui_idle) {
2175 DRM_DEBUG("gui idle\n");
2176 grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
2177 }
Alex Deucher45f9a392010-03-24 13:55:51 -04002178
2179 WREG32(CP_INT_CNTL, cp_int_cntl);
Alex Deucher2031f772010-04-22 12:52:11 -04002180 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
Alex Deucher45f9a392010-03-24 13:55:51 -04002181
2182 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
2183 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
2184 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
2185 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
2186 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
2187 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
2188
2189 WREG32(DC_HPD1_INT_CONTROL, hpd1);
2190 WREG32(DC_HPD2_INT_CONTROL, hpd2);
2191 WREG32(DC_HPD3_INT_CONTROL, hpd3);
2192 WREG32(DC_HPD4_INT_CONTROL, hpd4);
2193 WREG32(DC_HPD5_INT_CONTROL, hpd5);
2194 WREG32(DC_HPD6_INT_CONTROL, hpd6);
2195
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002196 return 0;
2197}
2198
Alex Deucher45f9a392010-03-24 13:55:51 -04002199static inline void evergreen_irq_ack(struct radeon_device *rdev,
2200 u32 *disp_int,
2201 u32 *disp_int_cont,
2202 u32 *disp_int_cont2,
2203 u32 *disp_int_cont3,
2204 u32 *disp_int_cont4,
2205 u32 *disp_int_cont5)
2206{
2207 u32 tmp;
2208
2209 *disp_int = RREG32(DISP_INTERRUPT_STATUS);
2210 *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
2211 *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
2212 *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
2213 *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
2214 *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
2215
2216 if (*disp_int & LB_D1_VBLANK_INTERRUPT)
2217 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
2218 if (*disp_int & LB_D1_VLINE_INTERRUPT)
2219 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
2220
2221 if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
2222 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
2223 if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
2224 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
2225
2226 if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
2227 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
2228 if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
2229 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
2230
2231 if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
2232 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
2233 if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
2234 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
2235
2236 if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
2237 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
2238 if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
2239 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
2240
2241 if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
2242 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
2243 if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
2244 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
2245
2246 if (*disp_int & DC_HPD1_INTERRUPT) {
2247 tmp = RREG32(DC_HPD1_INT_CONTROL);
2248 tmp |= DC_HPDx_INT_ACK;
2249 WREG32(DC_HPD1_INT_CONTROL, tmp);
2250 }
2251 if (*disp_int_cont & DC_HPD2_INTERRUPT) {
2252 tmp = RREG32(DC_HPD2_INT_CONTROL);
2253 tmp |= DC_HPDx_INT_ACK;
2254 WREG32(DC_HPD2_INT_CONTROL, tmp);
2255 }
2256 if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
2257 tmp = RREG32(DC_HPD3_INT_CONTROL);
2258 tmp |= DC_HPDx_INT_ACK;
2259 WREG32(DC_HPD3_INT_CONTROL, tmp);
2260 }
2261 if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
2262 tmp = RREG32(DC_HPD4_INT_CONTROL);
2263 tmp |= DC_HPDx_INT_ACK;
2264 WREG32(DC_HPD4_INT_CONTROL, tmp);
2265 }
2266 if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
2267 tmp = RREG32(DC_HPD5_INT_CONTROL);
2268 tmp |= DC_HPDx_INT_ACK;
2269 WREG32(DC_HPD5_INT_CONTROL, tmp);
2270 }
2271 if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
2272		tmp = RREG32(DC_HPD6_INT_CONTROL);
2273 tmp |= DC_HPDx_INT_ACK;
2274 WREG32(DC_HPD6_INT_CONTROL, tmp);
2275 }
2276}
2277
2278void evergreen_irq_disable(struct radeon_device *rdev)
2279{
2280 u32 disp_int, disp_int_cont, disp_int_cont2;
2281 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
2282
2283 r600_disable_interrupts(rdev);
2284 /* Wait and acknowledge irq */
2285 mdelay(1);
2286 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
2287 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
2288 evergreen_disable_interrupt_state(rdev);
2289}
2290
2291static void evergreen_irq_suspend(struct radeon_device *rdev)
2292{
2293 evergreen_irq_disable(rdev);
2294 r600_rlc_stop(rdev);
2295}
2296
2297static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
2298{
2299 u32 wptr, tmp;
2300
Alex Deucher724c80e2010-08-27 18:25:25 -04002301 if (rdev->wb.enabled)
2302 wptr = rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4];
2303 else
2304 wptr = RREG32(IH_RB_WPTR);
Alex Deucher45f9a392010-03-24 13:55:51 -04002305
2306 if (wptr & RB_OVERFLOW) {
2307		/* When a ring buffer overflow happens, start parsing interrupts
2308		 * from the last vector that was not overwritten (wptr + 16).
2309		 * Hopefully this allows us to catch up.
2310 */
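		/* Each IH ring vector is 16 bytes, which is why the read pointer
		 * here, and in evergreen_irq_process below, advances in steps of 16.
		 */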
2311 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
2312			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
2313 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
2314 tmp = RREG32(IH_RB_CNTL);
2315 tmp |= IH_WPTR_OVERFLOW_CLEAR;
2316 WREG32(IH_RB_CNTL, tmp);
2317 }
2318 return (wptr & rdev->ih.ptr_mask);
2319}
2320
2321int evergreen_irq_process(struct radeon_device *rdev)
2322{
2323 u32 wptr = evergreen_get_ih_wptr(rdev);
2324 u32 rptr = rdev->ih.rptr;
2325 u32 src_id, src_data;
2326 u32 ring_index;
2327 u32 disp_int, disp_int_cont, disp_int_cont2;
2328 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
2329 unsigned long flags;
2330 bool queue_hotplug = false;
2331
2332	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
2333 if (!rdev->ih.enabled)
2334 return IRQ_NONE;
2335
2336 spin_lock_irqsave(&rdev->ih.lock, flags);
2337
2338 if (rptr == wptr) {
2339 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2340 return IRQ_NONE;
2341 }
2342 if (rdev->shutdown) {
2343 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2344 return IRQ_NONE;
2345 }
2346
2347restart_ih:
2348 /* display interrupts */
2349 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
2350 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
2351
2352 rdev->ih.wptr = wptr;
2353 while (rptr != wptr) {
2354 /* wptr/rptr are in bytes! */
2355 ring_index = rptr / 4;
2356 src_id = rdev->ih.ring[ring_index] & 0xff;
2357 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
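		/* Each 16-byte IH vector carries the source id in the low byte of
		 * dword 0 and the source-specific data in the low 28 bits of dword 1;
		 * the remaining dwords are not used here.
		 */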
2358
2359 switch (src_id) {
2360 case 1: /* D1 vblank/vline */
2361 switch (src_data) {
2362 case 0: /* D1 vblank */
2363 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
2364 drm_handle_vblank(rdev->ddev, 0);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002365 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002366 wake_up(&rdev->irq.vblank_queue);
2367 disp_int &= ~LB_D1_VBLANK_INTERRUPT;
2368 DRM_DEBUG("IH: D1 vblank\n");
2369 }
2370 break;
2371 case 1: /* D1 vline */
2372 if (disp_int & LB_D1_VLINE_INTERRUPT) {
2373 disp_int &= ~LB_D1_VLINE_INTERRUPT;
2374 DRM_DEBUG("IH: D1 vline\n");
2375 }
2376 break;
2377 default:
2378 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2379 break;
2380 }
2381 break;
2382 case 2: /* D2 vblank/vline */
2383 switch (src_data) {
2384 case 0: /* D2 vblank */
2385 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
2386 drm_handle_vblank(rdev->ddev, 1);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002387 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002388 wake_up(&rdev->irq.vblank_queue);
2389 disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
2390 DRM_DEBUG("IH: D2 vblank\n");
2391 }
2392 break;
2393 case 1: /* D2 vline */
2394 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
2395 disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
2396 DRM_DEBUG("IH: D2 vline\n");
2397 }
2398 break;
2399 default:
2400 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2401 break;
2402 }
2403 break;
2404 case 3: /* D3 vblank/vline */
2405 switch (src_data) {
2406 case 0: /* D3 vblank */
2407 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
2408 drm_handle_vblank(rdev->ddev, 2);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002409 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002410 wake_up(&rdev->irq.vblank_queue);
2411 disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
2412 DRM_DEBUG("IH: D3 vblank\n");
2413 }
2414 break;
2415 case 1: /* D3 vline */
2416 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
2417 disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
2418 DRM_DEBUG("IH: D3 vline\n");
2419 }
2420 break;
2421 default:
2422 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2423 break;
2424 }
2425 break;
2426 case 4: /* D4 vblank/vline */
2427 switch (src_data) {
2428 case 0: /* D4 vblank */
2429 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
2430 drm_handle_vblank(rdev->ddev, 3);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002431 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002432 wake_up(&rdev->irq.vblank_queue);
2433 disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
2434 DRM_DEBUG("IH: D4 vblank\n");
2435 }
2436 break;
2437 case 1: /* D4 vline */
2438 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
2439 disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
2440 DRM_DEBUG("IH: D4 vline\n");
2441 }
2442 break;
2443 default:
2444 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2445 break;
2446 }
2447 break;
2448 case 5: /* D5 vblank/vline */
2449 switch (src_data) {
2450 case 0: /* D5 vblank */
2451 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
2452 drm_handle_vblank(rdev->ddev, 4);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002453 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002454 wake_up(&rdev->irq.vblank_queue);
2455 disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
2456 DRM_DEBUG("IH: D5 vblank\n");
2457 }
2458 break;
2459 case 1: /* D5 vline */
2460 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
2461 disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
2462 DRM_DEBUG("IH: D5 vline\n");
2463 }
2464 break;
2465 default:
2466 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2467 break;
2468 }
2469 break;
2470 case 6: /* D6 vblank/vline */
2471 switch (src_data) {
2472 case 0: /* D6 vblank */
2473 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
2474 drm_handle_vblank(rdev->ddev, 5);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002475 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002476 wake_up(&rdev->irq.vblank_queue);
2477 disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
2478 DRM_DEBUG("IH: D6 vblank\n");
2479 }
2480 break;
2481 case 1: /* D6 vline */
2482 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
2483 disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
2484 DRM_DEBUG("IH: D6 vline\n");
2485 }
2486 break;
2487 default:
2488 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2489 break;
2490 }
2491 break;
2492 case 42: /* HPD hotplug */
2493 switch (src_data) {
2494 case 0:
2495 if (disp_int & DC_HPD1_INTERRUPT) {
2496 disp_int &= ~DC_HPD1_INTERRUPT;
2497 queue_hotplug = true;
2498 DRM_DEBUG("IH: HPD1\n");
2499 }
2500 break;
2501 case 1:
2502 if (disp_int_cont & DC_HPD2_INTERRUPT) {
2503 disp_int_cont &= ~DC_HPD2_INTERRUPT;
2504 queue_hotplug = true;
2505 DRM_DEBUG("IH: HPD2\n");
2506 }
2507 break;
2508 case 2:
2509 if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
2510 disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
2511 queue_hotplug = true;
2512 DRM_DEBUG("IH: HPD3\n");
2513 }
2514 break;
2515 case 3:
2516 if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
2517 disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
2518 queue_hotplug = true;
2519 DRM_DEBUG("IH: HPD4\n");
2520 }
2521 break;
2522 case 4:
2523 if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
2524 disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
2525 queue_hotplug = true;
2526 DRM_DEBUG("IH: HPD5\n");
2527 }
2528 break;
2529 case 5:
2530 if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
2531 disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
2532 queue_hotplug = true;
2533 DRM_DEBUG("IH: HPD6\n");
2534 }
2535 break;
2536 default:
2537 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2538 break;
2539 }
2540 break;
2541 case 176: /* CP_INT in ring buffer */
2542 case 177: /* CP_INT in IB1 */
2543 case 178: /* CP_INT in IB2 */
2544 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
2545 radeon_fence_process(rdev);
2546 break;
2547 case 181: /* CP EOP event */
2548 DRM_DEBUG("IH: CP EOP\n");
Alex Deucherd0f8a852010-09-04 05:04:34 -04002549 radeon_fence_process(rdev);
Alex Deucher45f9a392010-03-24 13:55:51 -04002550 break;
Alex Deucher2031f772010-04-22 12:52:11 -04002551 case 233: /* GUI IDLE */
2552			DRM_DEBUG("IH: GUI idle\n");
2553 rdev->pm.gui_idle = true;
2554 wake_up(&rdev->irq.idle_queue);
2555 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04002556 default:
2557 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2558 break;
2559 }
2560
2561 /* wptr/rptr are in bytes! */
2562 rptr += 16;
2563 rptr &= rdev->ih.ptr_mask;
2564 }
2565 /* make sure wptr hasn't changed while processing */
2566 wptr = evergreen_get_ih_wptr(rdev);
2567 if (wptr != rdev->ih.wptr)
2568 goto restart_ih;
2569 if (queue_hotplug)
2570 queue_work(rdev->wq, &rdev->hotplug_work);
2571 rdev->ih.rptr = rptr;
2572 WREG32(IH_RB_RPTR, rdev->ih.rptr);
2573 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2574 return IRQ_HANDLED;
2575}
2576
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002577static int evergreen_startup(struct radeon_device *rdev)
2578{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002579 int r;
2580
2581 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
2582 r = r600_init_microcode(rdev);
2583 if (r) {
2584 DRM_ERROR("Failed to load firmware!\n");
2585 return r;
2586 }
2587 }
Alex Deucherfe251e22010-03-24 13:36:43 -04002588
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002589 evergreen_mc_program(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002590 if (rdev->flags & RADEON_IS_AGP) {
Alex Deucher0fcdb612010-03-24 13:20:41 -04002591 evergreen_agp_enable(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002592 } else {
2593 r = evergreen_pcie_gart_enable(rdev);
2594 if (r)
2595 return r;
2596 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002597 evergreen_gpu_init(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002598
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002599 r = evergreen_blit_init(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002600 if (r) {
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002601 evergreen_blit_fini(rdev);
2602 rdev->asic->copy = NULL;
2603 dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002604 }
2605
Alex Deucher724c80e2010-08-27 18:25:25 -04002606 /* allocate wb buffer */
2607 r = radeon_wb_init(rdev);
2608 if (r)
2609 return r;
2610
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002611 /* Enable IRQ */
2612 r = r600_irq_init(rdev);
2613 if (r) {
2614 DRM_ERROR("radeon: IH init failed (%d).\n", r);
2615 radeon_irq_kms_fini(rdev);
2616 return r;
2617 }
Alex Deucher45f9a392010-03-24 13:55:51 -04002618 evergreen_irq_set(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002619
2620 r = radeon_ring_init(rdev, rdev->cp.ring_size);
2621 if (r)
2622 return r;
2623 r = evergreen_cp_load_microcode(rdev);
2624 if (r)
2625 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04002626 r = evergreen_cp_resume(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002627 if (r)
2628 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04002629
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002630 return 0;
2631}
2632
2633int evergreen_resume(struct radeon_device *rdev)
2634{
2635 int r;
2636
2637	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
2638	 * posting will perform the necessary tasks to bring the GPU back into
2639	 * good shape.
2640 */
2641 /* post card */
2642 atom_asic_init(rdev->mode_info.atom_context);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002643
2644 r = evergreen_startup(rdev);
2645 if (r) {
2646		DRM_ERROR("evergreen startup failed on resume\n");
2647 return r;
2648 }
Alex Deucherfe251e22010-03-24 13:36:43 -04002649
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002650 r = r600_ib_test(rdev);
2651 if (r) {
2652		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2653 return r;
2654 }
Alex Deucherfe251e22010-03-24 13:36:43 -04002655
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002656 return r;
2657
2658}
2659
2660int evergreen_suspend(struct radeon_device *rdev)
2661{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002662 int r;
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002663
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002664 /* FIXME: we should wait for ring to be empty */
2665 r700_cp_stop(rdev);
2666 rdev->cp.ready = false;
Alex Deucher45f9a392010-03-24 13:55:51 -04002667 evergreen_irq_suspend(rdev);
Alex Deucher724c80e2010-08-27 18:25:25 -04002668 radeon_wb_disable(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002669 evergreen_pcie_gart_disable(rdev);
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002670
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002671 /* unpin shaders bo */
2672 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
2673 if (likely(r == 0)) {
2674 radeon_bo_unpin(rdev->r600_blit.shader_obj);
2675 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
2676 }
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002677
2678 return 0;
2679}
2680
2681int evergreen_copy_blit(struct radeon_device *rdev,
2682 uint64_t src_offset, uint64_t dst_offset,
2683 unsigned num_pages, struct radeon_fence *fence)
2684{
2685 int r;
2686
2687 mutex_lock(&rdev->r600_blit.mutex);
2688 rdev->r600_blit.vb_ib = NULL;
2689 r = evergreen_blit_prepare_copy(rdev, num_pages * RADEON_GPU_PAGE_SIZE);
2690 if (r) {
2691 if (rdev->r600_blit.vb_ib)
2692 radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
2693 mutex_unlock(&rdev->r600_blit.mutex);
2694 return r;
2695 }
2696 evergreen_kms_blit_copy(rdev, src_offset, dst_offset, num_pages * RADEON_GPU_PAGE_SIZE);
2697 evergreen_blit_done_copy(rdev, fence);
2698 mutex_unlock(&rdev->r600_blit.mutex);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002699 return 0;
2700}
2701
2702static bool evergreen_card_posted(struct radeon_device *rdev)
2703{
2704 u32 reg;
2705
2706 /* first check CRTCs */
2707 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2708 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
2709 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
2710 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
2711 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
2712 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
2713 if (reg & EVERGREEN_CRTC_MASTER_EN)
2714 return true;
2715
2716 /* then check MEM_SIZE, in case the crtcs are off */
2717 if (RREG32(CONFIG_MEMSIZE))
2718 return true;
2719
2720 return false;
2721}
2722
2723/* The plan is to move initialization into this function and use
2724 * helper functions so that radeon_device_init does pretty much
2725 * nothing more than call asic-specific functions. This should
2726 * also allow us to remove a bunch of callback functions
2727 * like vram_info.
2728 */
2729int evergreen_init(struct radeon_device *rdev)
2730{
2731 int r;
2732
2733 r = radeon_dummy_page_init(rdev);
2734 if (r)
2735 return r;
2736	/* This doesn't do much */
2737 r = radeon_gem_init(rdev);
2738 if (r)
2739 return r;
2740 /* Read BIOS */
2741 if (!radeon_get_bios(rdev)) {
2742 if (ASIC_IS_AVIVO(rdev))
2743 return -EINVAL;
2744 }
2745 /* Must be an ATOMBIOS */
2746 if (!rdev->is_atom_bios) {
2747		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
2748 return -EINVAL;
2749 }
2750 r = radeon_atombios_init(rdev);
2751 if (r)
2752 return r;
2753 /* Post card if necessary */
2754 if (!evergreen_card_posted(rdev)) {
2755 if (!rdev->bios) {
2756 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2757 return -EINVAL;
2758 }
2759 DRM_INFO("GPU not posted. posting now...\n");
2760 atom_asic_init(rdev->mode_info.atom_context);
2761 }
2762 /* Initialize scratch registers */
2763 r600_scratch_init(rdev);
2764 /* Initialize surface registers */
2765 radeon_surface_init(rdev);
2766 /* Initialize clocks */
2767 radeon_get_clock_info(rdev->ddev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002768 /* Fence driver */
2769 r = radeon_fence_driver_init(rdev);
2770 if (r)
2771 return r;
Jerome Glissed594e462010-02-17 21:54:29 +00002772 /* initialize AGP */
2773 if (rdev->flags & RADEON_IS_AGP) {
2774 r = radeon_agp_init(rdev);
2775 if (r)
2776 radeon_agp_disable(rdev);
2777 }
2778 /* initialize memory controller */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002779 r = evergreen_mc_init(rdev);
2780 if (r)
2781 return r;
2782 /* Memory manager */
2783 r = radeon_bo_init(rdev);
2784 if (r)
2785 return r;
Alex Deucher45f9a392010-03-24 13:55:51 -04002786
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002787 r = radeon_irq_kms_init(rdev);
2788 if (r)
2789 return r;
2790
2791 rdev->cp.ring_obj = NULL;
2792 r600_ring_init(rdev, 1024 * 1024);
2793
2794 rdev->ih.ring_obj = NULL;
2795 r600_ih_ring_init(rdev, 64 * 1024);
2796
2797 r = r600_pcie_gart_init(rdev);
2798 if (r)
2799 return r;
Alex Deucher0fcdb612010-03-24 13:20:41 -04002800
Alex Deucher148a03b2010-06-03 19:00:03 -04002801 rdev->accel_working = true;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002802 r = evergreen_startup(rdev);
2803 if (r) {
Alex Deucherfe251e22010-03-24 13:36:43 -04002804 dev_err(rdev->dev, "disabling GPU acceleration\n");
2805 r700_cp_fini(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04002806 r600_irq_fini(rdev);
Alex Deucher724c80e2010-08-27 18:25:25 -04002807 radeon_wb_fini(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04002808 radeon_irq_kms_fini(rdev);
Alex Deucher0fcdb612010-03-24 13:20:41 -04002809 evergreen_pcie_gart_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002810 rdev->accel_working = false;
2811 }
2812 if (rdev->accel_working) {
2813 r = radeon_ib_pool_init(rdev);
2814 if (r) {
2815 DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
2816 rdev->accel_working = false;
2817 }
2818 r = r600_ib_test(rdev);
2819 if (r) {
2820 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2821 rdev->accel_working = false;
2822 }
2823 }
2824 return 0;
2825}
2826
2827void evergreen_fini(struct radeon_device *rdev)
2828{
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002829 evergreen_blit_fini(rdev);
Alex Deucher45f9a392010-03-24 13:55:51 -04002830 r700_cp_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002831 r600_irq_fini(rdev);
Alex Deucher724c80e2010-08-27 18:25:25 -04002832 radeon_wb_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002833 radeon_irq_kms_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002834 evergreen_pcie_gart_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002835 radeon_gem_fini(rdev);
2836 radeon_fence_driver_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002837 radeon_agp_fini(rdev);
2838 radeon_bo_fini(rdev);
2839 radeon_atombios_fini(rdev);
2840 kfree(rdev->bios);
2841 rdev->bios = NULL;
2842 radeon_dummy_page_fini(rdev);
2843}