/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

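/* apply the voltage for the requested power state: program VDDC through
 * the ATOM tables when it differs from the currently set voltage
 */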
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
		if (voltage->voltage != rdev->pm.current_vddc) {
			radeon_atom_set_voltage(rdev, voltage->voltage);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: v: %d\n", voltage->voltage);
		}
	}
}

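/* pause display reads around a power state change by disabling memory
 * read requests on all active CRTCs
 */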
void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

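/* re-enable display memory read requests on active CRTCs once the power
 * state change is done
 */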
void evergreen_pm_finish(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* enable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

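/* poll the sense bit of the given hot plug detect pin to see whether a
 * display is connected
 */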
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	switch (hpd) {
	case RADEON_HPD_1:
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_2:
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_3:
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_4:
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_5:
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_6:
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	default:
		break;
	}

	return connected;
}

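/* set the HPD interrupt polarity so the next interrupt fires on the
 * opposite transition: unplug when connected, plug when disconnected
 */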
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = evergreen_hpd_sense(rdev, hpd);

	switch (hpd) {
	case RADEON_HPD_1:
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_2:
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_3:
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_4:
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_5:
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_6:
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		break;
	default:
		break;
	}
}

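/* enable the HPD pins actually used by this board's connectors and flag
 * the matching hotplug interrupts for enabling
 */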
void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			rdev->irq.hpd[0] = true;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			rdev->irq.hpd[1] = true;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			rdev->irq.hpd[2] = true;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			rdev->irq.hpd[3] = true;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			rdev->irq.hpd[4] = true;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			rdev->irq.hpd[5] = true;
			break;
		default:
			break;
		}
	}
	if (rdev->irq.installed)
		evergreen_irq_set(rdev);
}

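/* disable the HPD pins and their interrupt flags; mirrors evergreen_hpd_init() */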
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			rdev->irq.hpd[0] = false;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			rdev->irq.hpd[1] = false;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			rdev->irq.hpd[2] = false;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			rdev->irq.hpd[3] = false;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			rdev->irq.hpd[4] = false;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			rdev->irq.hpd[5] = false;
			break;
		default:
			break;
		}
	}
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}

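/* busy-wait until the memory controller reports idle, or give up after
 * the usec timeout
 */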
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the MC busy bits from SRBM_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the VM context0 request response */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] evergreen: TLB flush failed\n");
			return;
		}
		if (tmp)
			return;
		udelay(1);
	}
}

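/* enable the page table based GART: set up the L2 cache, the L1 TLBs
 * and VM context 0, then flush the TLB
 */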
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
	       RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}

void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}

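/* save the VGA and CRTC state, then blank all displays so the memory
 * controller can be reprogrammed with nothing reading from memory
 */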
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
	save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
	save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
	save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
	save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
	save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
	save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

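/* point the display surfaces at the (possibly moved) VRAM base and
 * restore the VGA/CRTC state saved by evergreen_mc_stop()
 */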
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

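/* program the memory controller's VRAM layout and the system/AGP
 * apertures while the displays are stopped
 */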
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
		       rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
		       rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

/*
 * CP.
 */

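/* upload the PFP and ME (PM4) microcode images to the command processor */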
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));

	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}

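/* reset the CP and the blocks tied to it, reprogram the ring buffer
 * registers and restart the CP
 */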
int evergreen_cp_resume(struct radeon_device *rdev)
{
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	rb_bufsz = drm_order(rdev->cp.ring_size / 8);
	tmp = RB_NO_UPDATE | (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x4);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	WREG32(CP_RB_WPTR, 0);
	WREG32(CP_RB_RPTR_ADDR, rdev->cp.gpu_addr & 0xFFFFFFFF);
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->cp.gpu_addr));
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	rdev->cp.rptr = RREG32(CP_RB_RPTR);
	rdev->cp.wptr = RREG32(CP_RB_WPTR);

	r600_cp_start(rdev);
	rdev->cp.ready = true;
	r = radeon_ring_test(rdev);
	if (r) {
		rdev->cp.ready = false;
		return r;
	}
	return 0;
}

/*
 * Core functions
 */
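/* build the pipe-to-backend mapping register value, distributing the
 * enabled render backends across the tile pipes (swizzled or linear)
 */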
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
						  u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;
	u32 enabled_backends_mask = 0;
	u32 enabled_backends_count = 0;
	u32 cur_pipe;
	u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
	u32 cur_backend = 0;
	u32 i;
	bool force_no_swizzle;

	if (num_tile_pipes > EVERGREEN_MAX_PIPES)
		num_tile_pipes = EVERGREEN_MAX_PIPES;
	if (num_tile_pipes < 1)
		num_tile_pipes = 1;
	if (num_backends > EVERGREEN_MAX_BACKENDS)
		num_backends = EVERGREEN_MAX_BACKENDS;
	if (num_backends < 1)
		num_backends = 1;

	for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
		if (((backend_disable_mask >> i) & 1) == 0) {
			enabled_backends_mask |= (1 << i);
			++enabled_backends_count;
		}
		if (enabled_backends_count == num_backends)
			break;
	}

	if (enabled_backends_count == 0) {
		enabled_backends_mask = 1;
		enabled_backends_count = 1;
	}

	if (enabled_backends_count != num_backends)
		num_backends = enabled_backends_count;

	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_REDWOOD:
		force_no_swizzle = false;
		break;
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	default:
		force_no_swizzle = true;
		break;
	}
	if (force_no_swizzle) {
		bool last_backend_enabled = false;

		force_no_swizzle = false;
		for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
			if (((enabled_backends_mask >> i) & 1) == 1) {
				if (last_backend_enabled)
					force_no_swizzle = true;
				last_backend_enabled = true;
			} else
				last_backend_enabled = false;
		}
	}

	switch (num_tile_pipes) {
	case 1:
	case 3:
	case 5:
	case 7:
		DRM_ERROR("odd number of pipes!\n");
		break;
	case 2:
		swizzle_pipe[0] = 0;
		swizzle_pipe[1] = 1;
		break;
	case 4:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 1;
			swizzle_pipe[3] = 3;
		}
		break;
	case 6:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 1;
			swizzle_pipe[4] = 3;
			swizzle_pipe[5] = 5;
		}
		break;
	case 8:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
			swizzle_pipe[6] = 6;
			swizzle_pipe[7] = 7;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 6;
			swizzle_pipe[4] = 1;
			swizzle_pipe[5] = 3;
			swizzle_pipe[6] = 5;
			swizzle_pipe[7] = 7;
		}
		break;
	}

	for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
		while (((1 << cur_backend) & enabled_backends_mask) == 0)
			cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;

		backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));

		cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
	}

	return backend_map;
}

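/* program the per-family gfx configuration: pipe/SIMD/backend setup,
 * SQ resource splits and assorted 3D engine defaults
 */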
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 cc_rb_backend_disable = 0;
	u32 cc_gc_shader_pipe_config;
	u32 gb_addr_config = 0;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 gb_backend_map;
	u32 grbm_gfx_index;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl;
	int i, j, num_shader_engines, ps_thread_count;

	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		break;
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;

	cc_gc_shader_pipe_config |=
		INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
				  & EVERGREEN_MAX_PIPES_MASK);
	cc_gc_shader_pipe_config |=
		INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
			       & EVERGREEN_MAX_SIMDS_MASK);

	cc_rb_backend_disable =
		BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
				& EVERGREEN_MAX_BACKENDS_MASK);

	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		gb_addr_config |= NUM_PIPES(0);
		break;
	case 2:
		gb_addr_config |= NUM_PIPES(1);
		break;
	case 4:
		gb_addr_config |= NUM_PIPES(2);
		break;
	case 8:
		gb_addr_config |= NUM_PIPES(3);
		break;
	}

	gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
	gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
	gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
	gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
	gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
	gb_addr_config |= MULTI_GPU_TILE_SIZE(2);

	if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
		gb_addr_config |= ROW_SIZE(2);
	else
		gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);

	if (rdev->ddev->pdev->device == 0x689e) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;
		u8 efuse_box_bit_131_124;

		WREG32(RCU_IND_INDEX, 0x204);
		efuse_straps_4 = RREG32(RCU_IND_DATA);
		WREG32(RCU_IND_INDEX, 0x203);
		efuse_straps_3 = RREG32(RCU_IND_DATA);
		efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));

		switch (efuse_box_bit_131_124) {
		case 0x00:
			gb_backend_map = 0x76543210;
			break;
		case 0x55:
			gb_backend_map = 0x77553311;
			break;
		case 0x56:
			gb_backend_map = 0x77553300;
			break;
		case 0x59:
			gb_backend_map = 0x77552211;
			break;
		case 0x66:
			gb_backend_map = 0x77443300;
			break;
		case 0x99:
			gb_backend_map = 0x66552211;
			break;
		case 0x5a:
			gb_backend_map = 0x77552200;
			break;
		case 0xaa:
			gb_backend_map = 0x66442200;
			break;
		case 0x95:
			gb_backend_map = 0x66553311;
			break;
		default:
			DRM_ERROR("bad backend map, using default\n");
			gb_backend_map =
				evergreen_get_tile_pipe_to_backend_map(rdev,
								       rdev->config.evergreen.max_tile_pipes,
								       rdev->config.evergreen.max_backends,
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
									 rdev->config.evergreen.max_backends) &
									EVERGREEN_MAX_BACKENDS_MASK));
			break;
		}
	} else if (rdev->ddev->pdev->device == 0x68b9) {
		u32 efuse_straps_3;
		u8 efuse_box_bit_127_124;

		WREG32(RCU_IND_INDEX, 0x203);
		efuse_straps_3 = RREG32(RCU_IND_DATA);
		/* shift before the cast, otherwise the masked high nibble is
		 * truncated away and the result is always 0
		 */
		efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);

		switch (efuse_box_bit_127_124) {
		case 0x0:
			gb_backend_map = 0x00003210;
			break;
		case 0x5:
		case 0x6:
		case 0x9:
		case 0xa:
			gb_backend_map = 0x00003311;
			break;
		default:
			DRM_ERROR("bad backend map, using default\n");
			gb_backend_map =
				evergreen_get_tile_pipe_to_backend_map(rdev,
								       rdev->config.evergreen.max_tile_pipes,
								       rdev->config.evergreen.max_backends,
								       ((EVERGREEN_MAX_BACKENDS_MASK <<
									 rdev->config.evergreen.max_backends) &
									EVERGREEN_MAX_BACKENDS_MASK));
			break;
		}
	} else
		gb_backend_map =
			evergreen_get_tile_pipe_to_backend_map(rdev,
							       rdev->config.evergreen.max_tile_pipes,
							       rdev->config.evergreen.max_backends,
							       ((EVERGREEN_MAX_BACKENDS_MASK <<
								 rdev->config.evergreen.max_backends) &
								EVERGREEN_MAX_BACKENDS_MASK));

	WREG32(GB_BACKEND_MAP, gb_backend_map);
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);

	num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
	grbm_gfx_index = INSTANCE_BROADCAST_WRITES;

	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
		u32 rb = cc_rb_backend_disable | (0xf0 << 16);
		u32 sp = cc_gc_shader_pipe_config;
		u32 gfx = grbm_gfx_index | SE_INDEX(i);

		if (i == num_shader_engines) {
			rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
			sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
		}

		WREG32(GRBM_GFX_INDEX, gfx);
		WREG32(RLC_GFX_INDEX, gfx);

		WREG32(CC_RB_BACKEND_DISABLE, rb);
		WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
		WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
		WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
	}

	grbm_gfx_index |= SE_BROADCAST_WRITES;
	WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
	WREG32(RLC_GFX_INDEX, grbm_gfx_index);

	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);

	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	if (rdev->family == CHIP_CEDAR)
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	if (rdev->family == CHIP_CEDAR)
		ps_thread_count = 96;
	else
		ps_thread_count = 128;

	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	if (rdev->family == CHIP_CEDAR)
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
	else
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	udelay(50);
}

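/* read back the VRAM configuration (width and size) and lay out the
 * GPU address space
 */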
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}

bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return false;
}

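/* soft reset the gfx (GRBM) and system (SRBM) blocks, then reinitialize
 * the asic via atombios and restore the MC state
 */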
static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 srbm_reset = 0;
	u32 grbm_reset = 0;

	dev_info(rdev->dev, "GPU softreset\n");
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		 RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		 RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		 RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		 RREG32(SRBM_STATUS));
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	/* reset all the gfx blocks */
	grbm_reset = (SOFT_RESET_CP |
		      SOFT_RESET_CB |
		      SOFT_RESET_DB |
		      SOFT_RESET_PA |
		      SOFT_RESET_SC |
		      SOFT_RESET_SPI |
		      SOFT_RESET_SH |
		      SOFT_RESET_SX |
		      SOFT_RESET_TC |
		      SOFT_RESET_TA |
		      SOFT_RESET_VC |
		      SOFT_RESET_VGT);

	dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
	WREG32(GRBM_SOFT_RESET, grbm_reset);
	(void)RREG32(GRBM_SOFT_RESET);
	udelay(50);
	WREG32(GRBM_SOFT_RESET, 0);
	(void)RREG32(GRBM_SOFT_RESET);

	/* reset all the system blocks */
	srbm_reset = SRBM_SOFT_RESET_ALL_MASK;

	dev_info(rdev->dev, "  SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
	WREG32(SRBM_SOFT_RESET, srbm_reset);
	(void)RREG32(SRBM_SOFT_RESET);
	udelay(50);
	WREG32(SRBM_SOFT_RESET, 0);
	(void)RREG32(SRBM_SOFT_RESET);
	/* Wait a little for things to settle down */
	udelay(50);
	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
		 RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
		 RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
		 RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
		 RREG32(SRBM_STATUS));
	/* After reset we need to reinit the asic as the GPU often ends up in
	 * an incoherent state.
	 */
	atom_asic_init(rdev->mode_info.atom_context);
	evergreen_mc_resume(rdev, &save);
	return 0;
}

int evergreen_asic_reset(struct radeon_device *rdev)
{
	return evergreen_gpu_soft_reset(rdev);
}

/* Interrupts */

u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
{
	switch (crtc) {
	case 0:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
	case 1:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
	case 2:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
	case 3:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
	case 4:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
	case 5:
		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
	default:
		return 0;
	}
}

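/* mask every interrupt source; only the HPD polarity bits are preserved */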
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	WREG32(CP_INT_CNTL, 0);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);
}

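/* enable the interrupt sources (CP, vblank, hpd, GUI idle) that currently
 * have users registered
 */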
1468int evergreen_irq_set(struct radeon_device *rdev)
1469{
1470 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
1471 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
1472 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
Alex Deucher2031f772010-04-22 12:52:11 -04001473 u32 grbm_int_cntl = 0;
Alex Deucher45f9a392010-03-24 13:55:51 -04001474
1475 if (!rdev->irq.installed) {
1476 WARN(1, "Can't enable IRQ/MSI because no handler is installed.\n");
1477 return -EINVAL;
1478 }
1479 /* don't enable anything if the ih is disabled */
1480 if (!rdev->ih.enabled) {
1481 r600_disable_interrupts(rdev);
1482 /* force the active interrupt state to all disabled */
1483 evergreen_disable_interrupt_state(rdev);
1484 return 0;
1485 }
1486
1487 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
1488 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
1489 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
1490 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
1491 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
1492 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
1493
1494 if (rdev->irq.sw_int) {
1495 DRM_DEBUG("evergreen_irq_set: sw int\n");
1496 cp_int_cntl |= RB_INT_ENABLE;
1497 }
1498 if (rdev->irq.crtc_vblank_int[0]) {
1499 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
1500 crtc1 |= VBLANK_INT_MASK;
1501 }
1502 if (rdev->irq.crtc_vblank_int[1]) {
1503 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
1504 crtc2 |= VBLANK_INT_MASK;
1505 }
1506 if (rdev->irq.crtc_vblank_int[2]) {
1507 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
1508 crtc3 |= VBLANK_INT_MASK;
1509 }
1510 if (rdev->irq.crtc_vblank_int[3]) {
1511 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
1512 crtc4 |= VBLANK_INT_MASK;
1513 }
1514 if (rdev->irq.crtc_vblank_int[4]) {
1515 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
1516 crtc5 |= VBLANK_INT_MASK;
1517 }
1518 if (rdev->irq.crtc_vblank_int[5]) {
1519 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
1520 crtc6 |= VBLANK_INT_MASK;
1521 }
1522 if (rdev->irq.hpd[0]) {
1523 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
1524 hpd1 |= DC_HPDx_INT_EN;
1525 }
1526 if (rdev->irq.hpd[1]) {
1527 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
1528 hpd2 |= DC_HPDx_INT_EN;
1529 }
1530 if (rdev->irq.hpd[2]) {
1531 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
1532 hpd3 |= DC_HPDx_INT_EN;
1533 }
1534 if (rdev->irq.hpd[3]) {
1535 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
1536 hpd4 |= DC_HPDx_INT_EN;
1537 }
1538 if (rdev->irq.hpd[4]) {
1539 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
1540 hpd5 |= DC_HPDx_INT_EN;
1541 }
1542 if (rdev->irq.hpd[5]) {
1543 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
1544 hpd6 |= DC_HPDx_INT_EN;
1545 }
Alex Deucher2031f772010-04-22 12:52:11 -04001546 if (rdev->irq.gui_idle) {
1547 DRM_DEBUG("gui idle\n");
1548 grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
1549 }
Alex Deucher45f9a392010-03-24 13:55:51 -04001550
1551 WREG32(CP_INT_CNTL, cp_int_cntl);
Alex Deucher2031f772010-04-22 12:52:11 -04001552 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
Alex Deucher45f9a392010-03-24 13:55:51 -04001553
1554 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
1555 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
1556 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
1557 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
1558 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
1559 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
1560
1561 WREG32(DC_HPD1_INT_CONTROL, hpd1);
1562 WREG32(DC_HPD2_INT_CONTROL, hpd2);
1563 WREG32(DC_HPD3_INT_CONTROL, hpd3);
1564 WREG32(DC_HPD4_INT_CONTROL, hpd4);
1565 WREG32(DC_HPD5_INT_CONTROL, hpd5);
1566 WREG32(DC_HPD6_INT_CONTROL, hpd6);
1567
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001568 return 0;
1569}
1570
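/* evergreen_irq_ack - latch the six DISP_INTERRUPT_STATUS registers for
 * the caller and write back the matching ACK bits, so that pending
 * vblank, vline and hot plug sources can trigger again.
 */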
Alex Deucher45f9a392010-03-24 13:55:51 -04001571static inline void evergreen_irq_ack(struct radeon_device *rdev,
1572 u32 *disp_int,
1573 u32 *disp_int_cont,
1574 u32 *disp_int_cont2,
1575 u32 *disp_int_cont3,
1576 u32 *disp_int_cont4,
1577 u32 *disp_int_cont5)
1578{
1579 u32 tmp;
1580
1581 *disp_int = RREG32(DISP_INTERRUPT_STATUS);
1582 *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
1583 *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
1584 *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
1585 *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
1586 *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
1587
1588 if (*disp_int & LB_D1_VBLANK_INTERRUPT)
1589 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
1590 if (*disp_int & LB_D1_VLINE_INTERRUPT)
1591 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
1592
1593 if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
1594 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
1595 if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
1596 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
1597
1598 if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
1599 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
1600 if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
1601 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
1602
1603 if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
1604 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
1605 if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
1606 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
1607
1608 if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
1609 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
1610 if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
1611 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
1612
1613 if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
1614 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
1615 if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
1616 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
1617
1618 if (*disp_int & DC_HPD1_INTERRUPT) {
1619 tmp = RREG32(DC_HPD1_INT_CONTROL);
1620 tmp |= DC_HPDx_INT_ACK;
1621 WREG32(DC_HPD1_INT_CONTROL, tmp);
1622 }
1623 if (*disp_int_cont & DC_HPD2_INTERRUPT) {
1624 tmp = RREG32(DC_HPD2_INT_CONTROL);
1625 tmp |= DC_HPDx_INT_ACK;
1626 WREG32(DC_HPD2_INT_CONTROL, tmp);
1627 }
1628 if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
1629 tmp = RREG32(DC_HPD3_INT_CONTROL);
1630 tmp |= DC_HPDx_INT_ACK;
1631 WREG32(DC_HPD3_INT_CONTROL, tmp);
1632 }
1633 if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
1634 tmp = RREG32(DC_HPD4_INT_CONTROL);
1635 tmp |= DC_HPDx_INT_ACK;
1636 WREG32(DC_HPD4_INT_CONTROL, tmp);
1637 }
1638 if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
1639 tmp = RREG32(DC_HPD5_INT_CONTROL);
1640 tmp |= DC_HPDx_INT_ACK;
1641 WREG32(DC_HPD5_INT_CONTROL, tmp);
1642 }
1643 if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
 1644 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1645 tmp |= DC_HPDx_INT_ACK;
1646 WREG32(DC_HPD6_INT_CONTROL, tmp);
1647 }
1648}
1649
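/* evergreen_irq_disable - mask all interrupt sources, then acknowledge
 * anything that fired while the masks were being written so nothing is
 * left pending.
 */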
1650void evergreen_irq_disable(struct radeon_device *rdev)
1651{
1652 u32 disp_int, disp_int_cont, disp_int_cont2;
1653 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1654
1655 r600_disable_interrupts(rdev);
1656 /* Wait and acknowledge irq */
1657 mdelay(1);
1658 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1659 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1660 evergreen_disable_interrupt_state(rdev);
1661}
1662
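/* evergreen_irq_suspend - quiesce interrupts and stop the RLC for suspend */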
1663static void evergreen_irq_suspend(struct radeon_device *rdev)
1664{
1665 evergreen_irq_disable(rdev);
1666 r600_rlc_stop(rdev);
1667}
1668
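/* evergreen_get_ih_wptr - fetch the IH ring write pointer from the hw.
 * On overflow the hw sets RB_OVERFLOW in the returned value; in that case
 * resync rptr to the oldest vector that is still intact (wptr + 16) and
 * clear the overflow flag via IH_RB_CNTL.
 */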
1669static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
1670{
1671 u32 wptr, tmp;
1672
1673 /* XXX use writeback */
1674 wptr = RREG32(IH_RB_WPTR);
1675
1676 if (wptr & RB_OVERFLOW) {
 1677 		/* When a ring buffer overflow happens, start parsing interrupts
 1678 		 * from the last vector that was not overwritten (wptr + 16).
 1679 		 * Hopefully this allows us to catch up.
 1680 		 */
 1681 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
 1682 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
1683 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
1684 tmp = RREG32(IH_RB_CNTL);
1685 tmp |= IH_WPTR_OVERFLOW_CLEAR;
1686 WREG32(IH_RB_CNTL, tmp);
1687 }
1688 return (wptr & rdev->ih.ptr_mask);
1689}
1690
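/* evergreen_irq_process - drain the IH ring between rptr and wptr.
 * Each vector on the ring is 16 bytes (4 dwords), of which only the
 * first two are consumed here:
 *   dword 0, bits 7:0  - src_id (1-6: crtc vblank/vline, 42: HPD,
 *                        176-178/181: CP, 233: GUI idle)
 *   dword 1, bits 27:0 - src_data (sub-source within src_id)
 * which is why rptr advances by 16 per vector. If new vectors arrive
 * while processing, the restart_ih loop picks them up before the read
 * pointer is written back to the hw.
 */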
1691int evergreen_irq_process(struct radeon_device *rdev)
1692{
1693 u32 wptr = evergreen_get_ih_wptr(rdev);
1694 u32 rptr = rdev->ih.rptr;
1695 u32 src_id, src_data;
1696 u32 ring_index;
1697 u32 disp_int, disp_int_cont, disp_int_cont2;
1698 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
1699 unsigned long flags;
1700 bool queue_hotplug = false;
1701
1702 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
1703 if (!rdev->ih.enabled)
1704 return IRQ_NONE;
1705
1706 spin_lock_irqsave(&rdev->ih.lock, flags);
1707
1708 if (rptr == wptr) {
1709 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1710 return IRQ_NONE;
1711 }
1712 if (rdev->shutdown) {
1713 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1714 return IRQ_NONE;
1715 }
1716
1717restart_ih:
1718 /* display interrupts */
1719 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
1720 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
1721
1722 rdev->ih.wptr = wptr;
1723 while (rptr != wptr) {
1724 /* wptr/rptr are in bytes! */
1725 ring_index = rptr / 4;
1726 src_id = rdev->ih.ring[ring_index] & 0xff;
1727 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
1728
1729 switch (src_id) {
1730 case 1: /* D1 vblank/vline */
1731 switch (src_data) {
1732 case 0: /* D1 vblank */
1733 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
1734 drm_handle_vblank(rdev->ddev, 0);
1735 wake_up(&rdev->irq.vblank_queue);
1736 disp_int &= ~LB_D1_VBLANK_INTERRUPT;
1737 DRM_DEBUG("IH: D1 vblank\n");
1738 }
1739 break;
1740 case 1: /* D1 vline */
1741 if (disp_int & LB_D1_VLINE_INTERRUPT) {
1742 disp_int &= ~LB_D1_VLINE_INTERRUPT;
1743 DRM_DEBUG("IH: D1 vline\n");
1744 }
1745 break;
1746 default:
1747 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1748 break;
1749 }
1750 break;
1751 case 2: /* D2 vblank/vline */
1752 switch (src_data) {
1753 case 0: /* D2 vblank */
1754 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
1755 drm_handle_vblank(rdev->ddev, 1);
1756 wake_up(&rdev->irq.vblank_queue);
1757 disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
1758 DRM_DEBUG("IH: D2 vblank\n");
1759 }
1760 break;
1761 case 1: /* D2 vline */
1762 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
1763 disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
1764 DRM_DEBUG("IH: D2 vline\n");
1765 }
1766 break;
1767 default:
1768 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1769 break;
1770 }
1771 break;
1772 case 3: /* D3 vblank/vline */
1773 switch (src_data) {
1774 case 0: /* D3 vblank */
1775 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
1776 drm_handle_vblank(rdev->ddev, 2);
1777 wake_up(&rdev->irq.vblank_queue);
1778 disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
1779 DRM_DEBUG("IH: D3 vblank\n");
1780 }
1781 break;
1782 case 1: /* D3 vline */
1783 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
1784 disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
1785 DRM_DEBUG("IH: D3 vline\n");
1786 }
1787 break;
1788 default:
1789 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1790 break;
1791 }
1792 break;
1793 case 4: /* D4 vblank/vline */
1794 switch (src_data) {
1795 case 0: /* D4 vblank */
1796 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
1797 drm_handle_vblank(rdev->ddev, 3);
1798 wake_up(&rdev->irq.vblank_queue);
1799 disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
1800 DRM_DEBUG("IH: D4 vblank\n");
1801 }
1802 break;
1803 case 1: /* D4 vline */
1804 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
1805 disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
1806 DRM_DEBUG("IH: D4 vline\n");
1807 }
1808 break;
1809 default:
1810 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1811 break;
1812 }
1813 break;
1814 case 5: /* D5 vblank/vline */
1815 switch (src_data) {
1816 case 0: /* D5 vblank */
1817 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
1818 drm_handle_vblank(rdev->ddev, 4);
1819 wake_up(&rdev->irq.vblank_queue);
1820 disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
1821 DRM_DEBUG("IH: D5 vblank\n");
1822 }
1823 break;
1824 case 1: /* D5 vline */
1825 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
1826 disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
1827 DRM_DEBUG("IH: D5 vline\n");
1828 }
1829 break;
1830 default:
1831 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1832 break;
1833 }
1834 break;
1835 case 6: /* D6 vblank/vline */
1836 switch (src_data) {
1837 case 0: /* D6 vblank */
1838 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
1839 drm_handle_vblank(rdev->ddev, 5);
1840 wake_up(&rdev->irq.vblank_queue);
1841 disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
1842 DRM_DEBUG("IH: D6 vblank\n");
1843 }
1844 break;
1845 case 1: /* D6 vline */
1846 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
1847 disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
1848 DRM_DEBUG("IH: D6 vline\n");
1849 }
1850 break;
1851 default:
1852 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1853 break;
1854 }
1855 break;
1856 case 42: /* HPD hotplug */
1857 switch (src_data) {
1858 case 0:
1859 if (disp_int & DC_HPD1_INTERRUPT) {
1860 disp_int &= ~DC_HPD1_INTERRUPT;
1861 queue_hotplug = true;
1862 DRM_DEBUG("IH: HPD1\n");
1863 }
1864 break;
1865 case 1:
1866 if (disp_int_cont & DC_HPD2_INTERRUPT) {
1867 disp_int_cont &= ~DC_HPD2_INTERRUPT;
1868 queue_hotplug = true;
1869 DRM_DEBUG("IH: HPD2\n");
1870 }
1871 break;
1872 case 2:
1873 if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
1874 disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
1875 queue_hotplug = true;
1876 DRM_DEBUG("IH: HPD3\n");
1877 }
1878 break;
1879 case 3:
1880 if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
1881 disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
1882 queue_hotplug = true;
1883 DRM_DEBUG("IH: HPD4\n");
1884 }
1885 break;
1886 case 4:
1887 if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
1888 disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
1889 queue_hotplug = true;
1890 DRM_DEBUG("IH: HPD5\n");
1891 }
1892 break;
1893 case 5:
1894 if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
1895 disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
1896 queue_hotplug = true;
1897 DRM_DEBUG("IH: HPD6\n");
1898 }
1899 break;
1900 default:
1901 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1902 break;
1903 }
1904 break;
1905 case 176: /* CP_INT in ring buffer */
1906 case 177: /* CP_INT in IB1 */
1907 case 178: /* CP_INT in IB2 */
1908 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
1909 radeon_fence_process(rdev);
1910 break;
1911 case 181: /* CP EOP event */
1912 DRM_DEBUG("IH: CP EOP\n");
1913 break;
Alex Deucher2031f772010-04-22 12:52:11 -04001914 case 233: /* GUI IDLE */
1915 DRM_DEBUG("IH: CP EOP\n");
1916 rdev->pm.gui_idle = true;
1917 wake_up(&rdev->irq.idle_queue);
1918 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04001919 default:
1920 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
1921 break;
1922 }
1923
1924 /* wptr/rptr are in bytes! */
1925 rptr += 16;
1926 rptr &= rdev->ih.ptr_mask;
1927 }
1928 /* make sure wptr hasn't changed while processing */
1929 wptr = evergreen_get_ih_wptr(rdev);
1930 if (wptr != rdev->ih.wptr)
1931 goto restart_ih;
1932 if (queue_hotplug)
1933 queue_work(rdev->wq, &rdev->hotplug_work);
1934 rdev->ih.rptr = rptr;
1935 WREG32(IH_RB_RPTR, rdev->ih.rptr);
1936 spin_unlock_irqrestore(&rdev->ih.lock, flags);
1937 return IRQ_HANDLED;
1938}
1939
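/* evergreen_startup - bring the asic up: load microcode if needed, program
 * the memory controller, enable AGP or the PCIE GART, initialize the GPU,
 * set up the IH and CP rings and finally enable writeback. Called from
 * both init and resume.
 */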
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001940static int evergreen_startup(struct radeon_device *rdev)
1941{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001942 int r;
1943
1944 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
1945 r = r600_init_microcode(rdev);
1946 if (r) {
1947 DRM_ERROR("Failed to load firmware!\n");
1948 return r;
1949 }
1950 }
Alex Deucherfe251e22010-03-24 13:36:43 -04001951
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001952 evergreen_mc_program(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001953 if (rdev->flags & RADEON_IS_AGP) {
Alex Deucher0fcdb612010-03-24 13:20:41 -04001954 evergreen_agp_enable(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001955 } else {
1956 r = evergreen_pcie_gart_enable(rdev);
1957 if (r)
1958 return r;
1959 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001960 evergreen_gpu_init(rdev);
1961#if 0
1962 if (!rdev->r600_blit.shader_obj) {
1963 r = r600_blit_init(rdev);
1964 if (r) {
1965 DRM_ERROR("radeon: failed blitter (%d).\n", r);
1966 return r;
1967 }
1968 }
1969
1970 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
1971 if (unlikely(r != 0))
1972 return r;
1973 r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
1974 &rdev->r600_blit.shader_gpu_addr);
1975 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
1976 if (r) {
1977 DRM_ERROR("failed to pin blit object %d\n", r);
1978 return r;
1979 }
Alex Deucher45f9a392010-03-24 13:55:51 -04001980#endif
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001981
1982 /* Enable IRQ */
1983 r = r600_irq_init(rdev);
1984 if (r) {
1985 DRM_ERROR("radeon: IH init failed (%d).\n", r);
1986 radeon_irq_kms_fini(rdev);
1987 return r;
1988 }
Alex Deucher45f9a392010-03-24 13:55:51 -04001989 evergreen_irq_set(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001990
1991 r = radeon_ring_init(rdev, rdev->cp.ring_size);
1992 if (r)
1993 return r;
1994 r = evergreen_cp_load_microcode(rdev);
1995 if (r)
1996 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04001997 r = evergreen_cp_resume(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001998 if (r)
1999 return r;
 2000 	/* write back buffers are not vital, so don't worry about failure */
2001 r600_wb_enable(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04002002
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002003 return 0;
2004}
2005
2006int evergreen_resume(struct radeon_device *rdev)
2007{
2008 int r;
2009
 2010 	/* Do not reset the GPU before posting; on evergreen hw, unlike on
 2011 	 * r500 hw, posting performs the tasks needed to bring the GPU back
 2012 	 * into good shape.
 2013 	 */
2014 /* post card */
2015 atom_asic_init(rdev->mode_info.atom_context);
2016 /* Initialize clocks */
2017 r = radeon_clocks_init(rdev);
2018 if (r) {
2019 return r;
2020 }
2021
2022 r = evergreen_startup(rdev);
2023 if (r) {
2024 DRM_ERROR("r600 startup failed on resume\n");
2025 return r;
2026 }
Alex Deucherfe251e22010-03-24 13:36:43 -04002027
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002028 r = r600_ib_test(rdev);
2029 if (r) {
2030 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
2031 return r;
2032 }
Alex Deucherfe251e22010-03-24 13:36:43 -04002033
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002034 return r;
2035
2036}
2037
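/* evergreen_suspend - tear down in roughly the reverse order of startup:
 * stop the CP, quiesce interrupts, disable writeback and unbind the GART.
 */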
2038int evergreen_suspend(struct radeon_device *rdev)
2039{
2040#if 0
2041 int r;
Alex Deucherfe251e22010-03-24 13:36:43 -04002042#endif
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002043 /* FIXME: we should wait for ring to be empty */
2044 r700_cp_stop(rdev);
2045 rdev->cp.ready = false;
Alex Deucher45f9a392010-03-24 13:55:51 -04002046 evergreen_irq_suspend(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002047 r600_wb_disable(rdev);
2048 evergreen_pcie_gart_disable(rdev);
Alex Deucher0fcdb612010-03-24 13:20:41 -04002049#if 0
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002050 /* unpin shaders bo */
2051 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
2052 if (likely(r == 0)) {
2053 radeon_bo_unpin(rdev->r600_blit.shader_obj);
2054 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
2055 }
2056#endif
2057 return 0;
2058}
2059
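/* evergreen_card_posted - check whether the card has been POSTed by the
 * vbios: any crtc with its master enable set, or a non-zero memory size
 * in CONFIG_MEMSIZE, means the asic was already initialized.
 */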
2060static bool evergreen_card_posted(struct radeon_device *rdev)
2061{
2062 u32 reg;
2063
2064 /* first check CRTCs */
2065 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2066 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
2067 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
2068 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
2069 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
2070 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
2071 if (reg & EVERGREEN_CRTC_MASTER_EN)
2072 return true;
2073
2074 /* then check MEM_SIZE, in case the crtcs are off */
2075 if (RREG32(CONFIG_MEMSIZE))
2076 return true;
2077
2078 return false;
2079}
2080
 2081/* The plan is to move initialization into this function and use
 2082 * helper functions so that radeon_device_init does pretty much
 2083 * nothing more than call asic specific functions. This should
 2084 * also allow us to remove a bunch of callback functions
 2085 * like vram_info.
 2086 */
2087int evergreen_init(struct radeon_device *rdev)
2088{
2089 int r;
2090
2091 r = radeon_dummy_page_init(rdev);
2092 if (r)
2093 return r;
 2094 	/* This doesn't do much */
2095 r = radeon_gem_init(rdev);
2096 if (r)
2097 return r;
2098 /* Read BIOS */
2099 if (!radeon_get_bios(rdev)) {
2100 if (ASIC_IS_AVIVO(rdev))
2101 return -EINVAL;
2102 }
2103 /* Must be an ATOMBIOS */
2104 if (!rdev->is_atom_bios) {
 2105 		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
2106 return -EINVAL;
2107 }
2108 r = radeon_atombios_init(rdev);
2109 if (r)
2110 return r;
2111 /* Post card if necessary */
2112 if (!evergreen_card_posted(rdev)) {
2113 if (!rdev->bios) {
2114 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2115 return -EINVAL;
2116 }
2117 DRM_INFO("GPU not posted. posting now...\n");
2118 atom_asic_init(rdev->mode_info.atom_context);
2119 }
2120 /* Initialize scratch registers */
2121 r600_scratch_init(rdev);
2122 /* Initialize surface registers */
2123 radeon_surface_init(rdev);
2124 /* Initialize clocks */
2125 radeon_get_clock_info(rdev->ddev);
2126 r = radeon_clocks_init(rdev);
2127 if (r)
2128 return r;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002129 /* Fence driver */
2130 r = radeon_fence_driver_init(rdev);
2131 if (r)
2132 return r;
Jerome Glissed594e462010-02-17 21:54:29 +00002133 /* initialize AGP */
2134 if (rdev->flags & RADEON_IS_AGP) {
2135 r = radeon_agp_init(rdev);
2136 if (r)
2137 radeon_agp_disable(rdev);
2138 }
2139 /* initialize memory controller */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002140 r = evergreen_mc_init(rdev);
2141 if (r)
2142 return r;
2143 /* Memory manager */
2144 r = radeon_bo_init(rdev);
2145 if (r)
2146 return r;
Alex Deucher45f9a392010-03-24 13:55:51 -04002147
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002148 r = radeon_irq_kms_init(rdev);
2149 if (r)
2150 return r;
2151
2152 rdev->cp.ring_obj = NULL;
2153 r600_ring_init(rdev, 1024 * 1024);
2154
2155 rdev->ih.ring_obj = NULL;
2156 r600_ih_ring_init(rdev, 64 * 1024);
2157
2158 r = r600_pcie_gart_init(rdev);
2159 if (r)
2160 return r;
Alex Deucher0fcdb612010-03-24 13:20:41 -04002161
Alex Deucher148a03b2010-06-03 19:00:03 -04002162 rdev->accel_working = true;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002163 r = evergreen_startup(rdev);
2164 if (r) {
Alex Deucherfe251e22010-03-24 13:36:43 -04002165 dev_err(rdev->dev, "disabling GPU acceleration\n");
2166 r700_cp_fini(rdev);
2167 r600_wb_fini(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04002168 r600_irq_fini(rdev);
2169 radeon_irq_kms_fini(rdev);
Alex Deucher0fcdb612010-03-24 13:20:41 -04002170 evergreen_pcie_gart_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002171 rdev->accel_working = false;
2172 }
2173 if (rdev->accel_working) {
2174 r = radeon_ib_pool_init(rdev);
2175 if (r) {
2176 DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
2177 rdev->accel_working = false;
2178 }
2179 r = r600_ib_test(rdev);
2180 if (r) {
2181 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2182 rdev->accel_working = false;
2183 }
2184 }
2185 return 0;
2186}
2187
2188void evergreen_fini(struct radeon_device *rdev)
2189{
Alex Deucher45f9a392010-03-24 13:55:51 -04002190 /*r600_blit_fini(rdev);*/
2191 r700_cp_fini(rdev);
2192 r600_wb_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002193 r600_irq_fini(rdev);
2194 radeon_irq_kms_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002195 evergreen_pcie_gart_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002196 radeon_gem_fini(rdev);
2197 radeon_fence_driver_fini(rdev);
2198 radeon_clocks_fini(rdev);
2199 radeon_agp_fini(rdev);
2200 radeon_bo_fini(rdev);
2201 radeon_atombios_fini(rdev);
2202 kfree(rdev->bios);
2203 rdev->bios = NULL;
2204 radeon_dummy_page_fini(rdev);
2205}