blob: 26b219bb138830d7f68d333a8d3f032dd29da9f9 [file] [log] [blame]
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001/*
2 * Copyright 2010 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24#include <linux/firmware.h>
25#include <linux/platform_device.h>
26#include "drmP.h"
27#include "radeon.h"
Daniel Vettere6990372010-03-11 21:19:17 +000028#include "radeon_asic.h"
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050029#include "radeon_drm.h"
Alex Deucher0fcdb612010-03-24 13:20:41 -040030#include "evergreend.h"
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050031#include "atom.h"
32#include "avivod.h"
33#include "evergreen_reg.h"
34
35static void evergreen_gpu_init(struct radeon_device *rdev);
36void evergreen_fini(struct radeon_device *rdev);
37
38bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
39{
40 bool connected = false;
41 /* XXX */
42 return connected;
43}
44
/*
 * Set the interrupt polarity of a hot-plug detect pin.
 * Not yet implemented for Evergreen.
 */
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	/* XXX */
}
50
/*
 * Set up hot-plug detect pins/interrupts.
 * Not yet implemented for Evergreen.
 */
void evergreen_hpd_init(struct radeon_device *rdev)
{
	/* XXX */
}
55
56
/*
 * Recompute display bandwidth settings (watermarks etc.).
 * Not yet implemented for Evergreen.
 */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}
61
/*
 * Tear down hot-plug detect state.
 * Not yet implemented for Evergreen.
 */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	/* XXX */
}
66
67static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
68{
69 unsigned i;
70 u32 tmp;
71
72 for (i = 0; i < rdev->usec_timeout; i++) {
73 /* read MC_STATUS */
74 tmp = RREG32(SRBM_STATUS) & 0x1F00;
75 if (!tmp)
76 return 0;
77 udelay(1);
78 }
79 return -1;
80}
81
82/*
83 * GART
84 */
Alex Deucher0fcdb612010-03-24 13:20:41 -040085void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
86{
87 unsigned i;
88 u32 tmp;
89
90 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
91 for (i = 0; i < rdev->usec_timeout; i++) {
92 /* read MC_STATUS */
93 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
94 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
95 if (tmp == 2) {
96 printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
97 return;
98 }
99 if (tmp) {
100 return;
101 }
102 udelay(1);
103 }
104}
105
/*
 * Enable the PCIE GART: program the L1/L2 TLB caches and point VM
 * context 0 at the page table held in VRAM.
 *
 * Returns 0 on success, -EINVAL if the page-table BO was never
 * allocated, or the error returned by pinning the table in VRAM.
 */
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* re-write all GART entries so the hw table matches our state */
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: same setting for every MD/MB L1 TLB instance */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 spans the GART aperture and uses the pinned table
	 * (addresses programmed in 4KB-page units, hence >> 12) */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* out-of-range accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	/* context 1 unused for now */
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}
150
/*
 * Disable the PCIE GART: turn off both VM contexts, reprogram the
 * L1/L2 TLBs without the enable bits, then unmap and unpin the
 * page-table BO if one exists.
 */
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache (note: no ENABLE_L2_CACHE here) */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: sizes only, L1 TLBs left disabled */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		/* if the reserve failed we must leave the BO mapped/pinned */
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}
183
/*
 * Final GART teardown: quiesce the hardware first, then free the
 * page-table BO and release the GART bookkeeping.
 */
void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
190
191
/*
 * Configure the MC for AGP operation: enable the L1/L2 TLBs for
 * system-aperture access while leaving both page-table contexts
 * disabled (no GART translation is set up here).
 */
void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: same setting for every MD/MB L1 TLB instance */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* both VM contexts off: accesses pass through untranslated */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
217
218static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
219{
220 save->vga_control[0] = RREG32(D1VGA_CONTROL);
221 save->vga_control[1] = RREG32(D2VGA_CONTROL);
222 save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
223 save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
224 save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
225 save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
226 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
227 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
228 save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
229 save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
230 save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
231 save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
232 save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
233 save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
234
235 /* Stop all video */
236 WREG32(VGA_RENDER_CONTROL, 0);
237 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
238 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
239 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
240 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
241 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
242 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
243 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
244 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
245 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
246 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
247 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
248 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
249 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
250 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
251 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
252 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
253 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
254 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
255
256 WREG32(D1VGA_CONTROL, 0);
257 WREG32(D2VGA_CONTROL, 0);
258 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
259 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
260 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
261 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
262}
263
264static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
265{
266 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
267 upper_32_bits(rdev->mc.vram_start));
268 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
269 upper_32_bits(rdev->mc.vram_start));
270 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
271 (u32)rdev->mc.vram_start);
272 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
273 (u32)rdev->mc.vram_start);
274
275 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
276 upper_32_bits(rdev->mc.vram_start));
277 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
278 upper_32_bits(rdev->mc.vram_start));
279 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
280 (u32)rdev->mc.vram_start);
281 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
282 (u32)rdev->mc.vram_start);
283
284 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
285 upper_32_bits(rdev->mc.vram_start));
286 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
287 upper_32_bits(rdev->mc.vram_start));
288 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
289 (u32)rdev->mc.vram_start);
290 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
291 (u32)rdev->mc.vram_start);
292
293 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
294 upper_32_bits(rdev->mc.vram_start));
295 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
296 upper_32_bits(rdev->mc.vram_start));
297 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
298 (u32)rdev->mc.vram_start);
299 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
300 (u32)rdev->mc.vram_start);
301
302 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
303 upper_32_bits(rdev->mc.vram_start));
304 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
305 upper_32_bits(rdev->mc.vram_start));
306 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
307 (u32)rdev->mc.vram_start);
308 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
309 (u32)rdev->mc.vram_start);
310
311 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
312 upper_32_bits(rdev->mc.vram_start));
313 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
314 upper_32_bits(rdev->mc.vram_start));
315 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
316 (u32)rdev->mc.vram_start);
317 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
318 (u32)rdev->mc.vram_start);
319
320 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
321 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
322 /* Unlock host access */
323 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
324 mdelay(1);
325 /* Restore video state */
326 WREG32(D1VGA_CONTROL, save->vga_control[0]);
327 WREG32(D2VGA_CONTROL, save->vga_control[1]);
328 WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
329 WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
330 WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
331 WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
332 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
333 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
334 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
335 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
336 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
337 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
338 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
339 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
340 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
341 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
342 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
343 WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
344 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
345 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
346 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
347 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
348 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
349 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
350 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
351}
352
/*
 * Program the memory controller: stop the displays, wait for the MC
 * to go idle, set up the system/FB/AGP apertures, then bring the
 * displays back.
 */
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration: system aperture must cover both VRAM and
	 * the AGP window when AGP is in use */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	/* FB location: end in the upper 16 bits, start in the lower 16,
	 * both in 16MB (>> 24) units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP leaves the AGP aperture disabled */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
420
#if 0
/*
 * CP.
 */
/* Command-processor handling is not yet implemented for Evergreen;
 * these stubs are compiled out until the CP bring-up lands. */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
	/* XXX */
}


static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	/* XXX */

	return 0;
}
#endif
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500438
439/*
440 * Core functions
441 */
Alex Deucher32fcdbf2010-03-24 13:33:47 -0400442static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
443 u32 num_tile_pipes,
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500444 u32 num_backends,
445 u32 backend_disable_mask)
446{
447 u32 backend_map = 0;
Alex Deucher32fcdbf2010-03-24 13:33:47 -0400448 u32 enabled_backends_mask = 0;
449 u32 enabled_backends_count = 0;
450 u32 cur_pipe;
451 u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
452 u32 cur_backend = 0;
453 u32 i;
454 bool force_no_swizzle;
455
456 if (num_tile_pipes > EVERGREEN_MAX_PIPES)
457 num_tile_pipes = EVERGREEN_MAX_PIPES;
458 if (num_tile_pipes < 1)
459 num_tile_pipes = 1;
460 if (num_backends > EVERGREEN_MAX_BACKENDS)
461 num_backends = EVERGREEN_MAX_BACKENDS;
462 if (num_backends < 1)
463 num_backends = 1;
464
465 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
466 if (((backend_disable_mask >> i) & 1) == 0) {
467 enabled_backends_mask |= (1 << i);
468 ++enabled_backends_count;
469 }
470 if (enabled_backends_count == num_backends)
471 break;
472 }
473
474 if (enabled_backends_count == 0) {
475 enabled_backends_mask = 1;
476 enabled_backends_count = 1;
477 }
478
479 if (enabled_backends_count != num_backends)
480 num_backends = enabled_backends_count;
481
482 memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
483 switch (rdev->family) {
484 case CHIP_CEDAR:
485 case CHIP_REDWOOD:
486 force_no_swizzle = false;
487 break;
488 case CHIP_CYPRESS:
489 case CHIP_HEMLOCK:
490 case CHIP_JUNIPER:
491 default:
492 force_no_swizzle = true;
493 break;
494 }
495 if (force_no_swizzle) {
496 bool last_backend_enabled = false;
497
498 force_no_swizzle = false;
499 for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
500 if (((enabled_backends_mask >> i) & 1) == 1) {
501 if (last_backend_enabled)
502 force_no_swizzle = true;
503 last_backend_enabled = true;
504 } else
505 last_backend_enabled = false;
506 }
507 }
508
509 switch (num_tile_pipes) {
510 case 1:
511 case 3:
512 case 5:
513 case 7:
514 DRM_ERROR("odd number of pipes!\n");
515 break;
516 case 2:
517 swizzle_pipe[0] = 0;
518 swizzle_pipe[1] = 1;
519 break;
520 case 4:
521 if (force_no_swizzle) {
522 swizzle_pipe[0] = 0;
523 swizzle_pipe[1] = 1;
524 swizzle_pipe[2] = 2;
525 swizzle_pipe[3] = 3;
526 } else {
527 swizzle_pipe[0] = 0;
528 swizzle_pipe[1] = 2;
529 swizzle_pipe[2] = 1;
530 swizzle_pipe[3] = 3;
531 }
532 break;
533 case 6:
534 if (force_no_swizzle) {
535 swizzle_pipe[0] = 0;
536 swizzle_pipe[1] = 1;
537 swizzle_pipe[2] = 2;
538 swizzle_pipe[3] = 3;
539 swizzle_pipe[4] = 4;
540 swizzle_pipe[5] = 5;
541 } else {
542 swizzle_pipe[0] = 0;
543 swizzle_pipe[1] = 2;
544 swizzle_pipe[2] = 4;
545 swizzle_pipe[3] = 1;
546 swizzle_pipe[4] = 3;
547 swizzle_pipe[5] = 5;
548 }
549 break;
550 case 8:
551 if (force_no_swizzle) {
552 swizzle_pipe[0] = 0;
553 swizzle_pipe[1] = 1;
554 swizzle_pipe[2] = 2;
555 swizzle_pipe[3] = 3;
556 swizzle_pipe[4] = 4;
557 swizzle_pipe[5] = 5;
558 swizzle_pipe[6] = 6;
559 swizzle_pipe[7] = 7;
560 } else {
561 swizzle_pipe[0] = 0;
562 swizzle_pipe[1] = 2;
563 swizzle_pipe[2] = 4;
564 swizzle_pipe[3] = 6;
565 swizzle_pipe[4] = 1;
566 swizzle_pipe[5] = 3;
567 swizzle_pipe[6] = 5;
568 swizzle_pipe[7] = 7;
569 }
570 break;
571 }
572
573 for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
574 while (((1 << cur_backend) & enabled_backends_mask) == 0)
575 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
576
577 backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));
578
579 cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
580 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500581
582 return backend_map;
583}
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500584
585static void evergreen_gpu_init(struct radeon_device *rdev)
586{
Alex Deucher32fcdbf2010-03-24 13:33:47 -0400587 u32 cc_rb_backend_disable = 0;
588 u32 cc_gc_shader_pipe_config;
589 u32 gb_addr_config = 0;
590 u32 mc_shared_chmap, mc_arb_ramcfg;
591 u32 gb_backend_map;
592 u32 grbm_gfx_index;
593 u32 sx_debug_1;
594 u32 smx_dc_ctl0;
595 u32 sq_config;
596 u32 sq_lds_resource_mgmt;
597 u32 sq_gpr_resource_mgmt_1;
598 u32 sq_gpr_resource_mgmt_2;
599 u32 sq_gpr_resource_mgmt_3;
600 u32 sq_thread_resource_mgmt;
601 u32 sq_thread_resource_mgmt_2;
602 u32 sq_stack_resource_mgmt_1;
603 u32 sq_stack_resource_mgmt_2;
604 u32 sq_stack_resource_mgmt_3;
605 u32 vgt_cache_invalidation;
606 u32 hdp_host_path_cntl;
607 int i, j, num_shader_engines, ps_thread_count;
608
609 switch (rdev->family) {
610 case CHIP_CYPRESS:
611 case CHIP_HEMLOCK:
612 rdev->config.evergreen.num_ses = 2;
613 rdev->config.evergreen.max_pipes = 4;
614 rdev->config.evergreen.max_tile_pipes = 8;
615 rdev->config.evergreen.max_simds = 10;
616 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
617 rdev->config.evergreen.max_gprs = 256;
618 rdev->config.evergreen.max_threads = 248;
619 rdev->config.evergreen.max_gs_threads = 32;
620 rdev->config.evergreen.max_stack_entries = 512;
621 rdev->config.evergreen.sx_num_of_sets = 4;
622 rdev->config.evergreen.sx_max_export_size = 256;
623 rdev->config.evergreen.sx_max_export_pos_size = 64;
624 rdev->config.evergreen.sx_max_export_smx_size = 192;
625 rdev->config.evergreen.max_hw_contexts = 8;
626 rdev->config.evergreen.sq_num_cf_insts = 2;
627
628 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
629 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
630 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
631 break;
632 case CHIP_JUNIPER:
633 rdev->config.evergreen.num_ses = 1;
634 rdev->config.evergreen.max_pipes = 4;
635 rdev->config.evergreen.max_tile_pipes = 4;
636 rdev->config.evergreen.max_simds = 10;
637 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
638 rdev->config.evergreen.max_gprs = 256;
639 rdev->config.evergreen.max_threads = 248;
640 rdev->config.evergreen.max_gs_threads = 32;
641 rdev->config.evergreen.max_stack_entries = 512;
642 rdev->config.evergreen.sx_num_of_sets = 4;
643 rdev->config.evergreen.sx_max_export_size = 256;
644 rdev->config.evergreen.sx_max_export_pos_size = 64;
645 rdev->config.evergreen.sx_max_export_smx_size = 192;
646 rdev->config.evergreen.max_hw_contexts = 8;
647 rdev->config.evergreen.sq_num_cf_insts = 2;
648
649 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
650 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
651 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
652 break;
653 case CHIP_REDWOOD:
654 rdev->config.evergreen.num_ses = 1;
655 rdev->config.evergreen.max_pipes = 4;
656 rdev->config.evergreen.max_tile_pipes = 4;
657 rdev->config.evergreen.max_simds = 5;
658 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
659 rdev->config.evergreen.max_gprs = 256;
660 rdev->config.evergreen.max_threads = 248;
661 rdev->config.evergreen.max_gs_threads = 32;
662 rdev->config.evergreen.max_stack_entries = 256;
663 rdev->config.evergreen.sx_num_of_sets = 4;
664 rdev->config.evergreen.sx_max_export_size = 256;
665 rdev->config.evergreen.sx_max_export_pos_size = 64;
666 rdev->config.evergreen.sx_max_export_smx_size = 192;
667 rdev->config.evergreen.max_hw_contexts = 8;
668 rdev->config.evergreen.sq_num_cf_insts = 2;
669
670 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
671 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
672 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
673 break;
674 case CHIP_CEDAR:
675 default:
676 rdev->config.evergreen.num_ses = 1;
677 rdev->config.evergreen.max_pipes = 2;
678 rdev->config.evergreen.max_tile_pipes = 2;
679 rdev->config.evergreen.max_simds = 2;
680 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
681 rdev->config.evergreen.max_gprs = 256;
682 rdev->config.evergreen.max_threads = 192;
683 rdev->config.evergreen.max_gs_threads = 16;
684 rdev->config.evergreen.max_stack_entries = 256;
685 rdev->config.evergreen.sx_num_of_sets = 4;
686 rdev->config.evergreen.sx_max_export_size = 128;
687 rdev->config.evergreen.sx_max_export_pos_size = 32;
688 rdev->config.evergreen.sx_max_export_smx_size = 96;
689 rdev->config.evergreen.max_hw_contexts = 4;
690 rdev->config.evergreen.sq_num_cf_insts = 1;
691
692 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
693 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
694 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
695 break;
696 }
697
698 /* Initialize HDP */
699 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
700 WREG32((0x2c14 + j), 0x00000000);
701 WREG32((0x2c18 + j), 0x00000000);
702 WREG32((0x2c1c + j), 0x00000000);
703 WREG32((0x2c20 + j), 0x00000000);
704 WREG32((0x2c24 + j), 0x00000000);
705 }
706
707 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
708
709 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
710
711 cc_gc_shader_pipe_config |=
712 INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
713 & EVERGREEN_MAX_PIPES_MASK);
714 cc_gc_shader_pipe_config |=
715 INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
716 & EVERGREEN_MAX_SIMDS_MASK);
717
718 cc_rb_backend_disable =
719 BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
720 & EVERGREEN_MAX_BACKENDS_MASK);
721
722
723 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
724 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
725
726 switch (rdev->config.evergreen.max_tile_pipes) {
727 case 1:
728 default:
729 gb_addr_config |= NUM_PIPES(0);
730 break;
731 case 2:
732 gb_addr_config |= NUM_PIPES(1);
733 break;
734 case 4:
735 gb_addr_config |= NUM_PIPES(2);
736 break;
737 case 8:
738 gb_addr_config |= NUM_PIPES(3);
739 break;
740 }
741
742 gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
743 gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
744 gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
745 gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
746 gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
747 gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
748
749 if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
750 gb_addr_config |= ROW_SIZE(2);
751 else
752 gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
753
754 if (rdev->ddev->pdev->device == 0x689e) {
755 u32 efuse_straps_4;
756 u32 efuse_straps_3;
757 u8 efuse_box_bit_131_124;
758
759 WREG32(RCU_IND_INDEX, 0x204);
760 efuse_straps_4 = RREG32(RCU_IND_DATA);
761 WREG32(RCU_IND_INDEX, 0x203);
762 efuse_straps_3 = RREG32(RCU_IND_DATA);
763 efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
764
765 switch(efuse_box_bit_131_124) {
766 case 0x00:
767 gb_backend_map = 0x76543210;
768 break;
769 case 0x55:
770 gb_backend_map = 0x77553311;
771 break;
772 case 0x56:
773 gb_backend_map = 0x77553300;
774 break;
775 case 0x59:
776 gb_backend_map = 0x77552211;
777 break;
778 case 0x66:
779 gb_backend_map = 0x77443300;
780 break;
781 case 0x99:
782 gb_backend_map = 0x66552211;
783 break;
784 case 0x5a:
785 gb_backend_map = 0x77552200;
786 break;
787 case 0xaa:
788 gb_backend_map = 0x66442200;
789 break;
790 case 0x95:
791 gb_backend_map = 0x66553311;
792 break;
793 default:
794 DRM_ERROR("bad backend map, using default\n");
795 gb_backend_map =
796 evergreen_get_tile_pipe_to_backend_map(rdev,
797 rdev->config.evergreen.max_tile_pipes,
798 rdev->config.evergreen.max_backends,
799 ((EVERGREEN_MAX_BACKENDS_MASK <<
800 rdev->config.evergreen.max_backends) &
801 EVERGREEN_MAX_BACKENDS_MASK));
802 break;
803 }
804 } else if (rdev->ddev->pdev->device == 0x68b9) {
805 u32 efuse_straps_3;
806 u8 efuse_box_bit_127_124;
807
808 WREG32(RCU_IND_INDEX, 0x203);
809 efuse_straps_3 = RREG32(RCU_IND_DATA);
810 efuse_box_bit_127_124 = (u8)(efuse_straps_3 & 0xF0000000) >> 28;
811
812 switch(efuse_box_bit_127_124) {
813 case 0x0:
814 gb_backend_map = 0x00003210;
815 break;
816 case 0x5:
817 case 0x6:
818 case 0x9:
819 case 0xa:
820 gb_backend_map = 0x00003311;
821 break;
822 default:
823 DRM_ERROR("bad backend map, using default\n");
824 gb_backend_map =
825 evergreen_get_tile_pipe_to_backend_map(rdev,
826 rdev->config.evergreen.max_tile_pipes,
827 rdev->config.evergreen.max_backends,
828 ((EVERGREEN_MAX_BACKENDS_MASK <<
829 rdev->config.evergreen.max_backends) &
830 EVERGREEN_MAX_BACKENDS_MASK));
831 break;
832 }
833 } else
834 gb_backend_map =
835 evergreen_get_tile_pipe_to_backend_map(rdev,
836 rdev->config.evergreen.max_tile_pipes,
837 rdev->config.evergreen.max_backends,
838 ((EVERGREEN_MAX_BACKENDS_MASK <<
839 rdev->config.evergreen.max_backends) &
840 EVERGREEN_MAX_BACKENDS_MASK));
841
842 WREG32(GB_BACKEND_MAP, gb_backend_map);
843 WREG32(GB_ADDR_CONFIG, gb_addr_config);
844 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
845 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
846
847 num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
848 grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
849
850 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
851 u32 rb = cc_rb_backend_disable | (0xf0 << 16);
852 u32 sp = cc_gc_shader_pipe_config;
853 u32 gfx = grbm_gfx_index | SE_INDEX(i);
854
855 if (i == num_shader_engines) {
856 rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
857 sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
858 }
859
860 WREG32(GRBM_GFX_INDEX, gfx);
861 WREG32(RLC_GFX_INDEX, gfx);
862
863 WREG32(CC_RB_BACKEND_DISABLE, rb);
864 WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
865 WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
866 WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
867 }
868
869 grbm_gfx_index |= SE_BROADCAST_WRITES;
870 WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
871 WREG32(RLC_GFX_INDEX, grbm_gfx_index);
872
873 WREG32(CGTS_SYS_TCC_DISABLE, 0);
874 WREG32(CGTS_TCC_DISABLE, 0);
875 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
876 WREG32(CGTS_USER_TCC_DISABLE, 0);
877
878 /* set HW defaults for 3D engine */
879 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
880 ROQ_IB2_START(0x2b)));
881
882 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
883
884 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
885 SYNC_GRADIENT |
886 SYNC_WALKER |
887 SYNC_ALIGNER));
888
889 sx_debug_1 = RREG32(SX_DEBUG_1);
890 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
891 WREG32(SX_DEBUG_1, sx_debug_1);
892
893
894 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
895 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
896 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
897 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
898
899 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
900 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
901 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
902
903 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
904 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
905 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
906
907 WREG32(VGT_NUM_INSTANCES, 1);
908 WREG32(SPI_CONFIG_CNTL, 0);
909 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
910 WREG32(CP_PERFMON_CNTL, 0);
911
912 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
913 FETCH_FIFO_HIWATER(0x4) |
914 DONE_FIFO_HIWATER(0xe0) |
915 ALU_UPDATE_FIFO_HIWATER(0x8)));
916
917 sq_config = RREG32(SQ_CONFIG);
918 sq_config &= ~(PS_PRIO(3) |
919 VS_PRIO(3) |
920 GS_PRIO(3) |
921 ES_PRIO(3));
922 sq_config |= (VC_ENABLE |
923 EXPORT_SRC_C |
924 PS_PRIO(0) |
925 VS_PRIO(1) |
926 GS_PRIO(2) |
927 ES_PRIO(3));
928
929 if (rdev->family == CHIP_CEDAR)
930 /* no vertex cache */
931 sq_config &= ~VC_ENABLE;
932
933 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
934
935 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
936 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
937 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
938 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
939 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
940 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
941 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
942
943 if (rdev->family == CHIP_CEDAR)
944 ps_thread_count = 96;
945 else
946 ps_thread_count = 128;
947
948 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
949 sq_thread_resource_mgmt |= NUM_VS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
950 sq_thread_resource_mgmt |= NUM_GS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
951 sq_thread_resource_mgmt |= NUM_ES_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
952 sq_thread_resource_mgmt_2 = NUM_HS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
953 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS(((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8;
954
955 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
956 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
957 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
958 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
959 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
960 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
961
962 WREG32(SQ_CONFIG, sq_config);
963 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
964 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
965 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
966 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
967 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
968 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
969 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
970 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
971 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
972 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
973
974 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
975 FORCE_EOV_MAX_REZ_CNT(255)));
976
977 if (rdev->family == CHIP_CEDAR)
978 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
979 else
980 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
981 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
982 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
983
984 WREG32(VGT_GS_VERTEX_REUSE, 16);
985 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
986
987 WREG32(CB_PERF_CTR0_SEL_0, 0);
988 WREG32(CB_PERF_CTR0_SEL_1, 0);
989 WREG32(CB_PERF_CTR1_SEL_0, 0);
990 WREG32(CB_PERF_CTR1_SEL_1, 0);
991 WREG32(CB_PERF_CTR2_SEL_0, 0);
992 WREG32(CB_PERF_CTR2_SEL_1, 0);
993 WREG32(CB_PERF_CTR3_SEL_0, 0);
994 WREG32(CB_PERF_CTR3_SEL_1, 0);
995
996 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
997 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
998
999 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
1000
1001 udelay(50);
1002
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001003}
1004
/*
 * evergreen_mc_init - probe the memory controller configuration.
 *
 * Derives the VRAM bus width from the channel-size and channel-count
 * straps, reads the VRAM size (in MB) from CONFIG_MEMSIZE, and places
 * the VRAM/GTT apertures in the GPU address space via
 * r600_vram_gtt_location().
 *
 * Returns 0 (the probe itself has no failure path).
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* Per-channel width in bits: the override strap forces 16,
	 * otherwise CHANSIZE selects 64 vs 32. */
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* Number of memory channels is encoded as 0..3 -> 1/2/4/8. */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	/* FIXME remove this once we support unmappable VRAM */
	if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
		/* Clamp usable VRAM to the CPU-visible aperture for now. */
		rdev->mc.mc_vram_size = rdev->mc.aper_size;
		rdev->mc.real_vram_size = rdev->mc.aper_size;
	}
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
Jerome Glissed594e462010-02-17 21:54:29 +00001055
Jerome Glisse225758d2010-03-09 14:45:10 +00001056bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
1057{
1058 /* FIXME: implement for evergreen */
1059 return false;
1060}
1061
/*
 * evergreen_gpu_soft_reset - soft reset the GFX and system blocks.
 *
 * Dumps the GRBM/SRBM status registers before and after the reset for
 * diagnostics, stops the MC (saving its state), halts the CP, then
 * pulses the GRBM soft-reset lines for all gfx blocks followed by the
 * full SRBM soft-reset mask.  The ASIC is re-posted through atombios
 * afterwards because the GPU often ends up in an incoherent state.
 *
 * Always returns 0.
 */
static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 srbm_reset = 0;
	u32 grbm_reset = 0;

	dev_info(rdev->dev, "GPU softreset \n");
	dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
		RREG32(SRBM_STATUS));
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	/* reset all the gfx blocks */
	grbm_reset = (SOFT_RESET_CP |
		      SOFT_RESET_CB |
		      SOFT_RESET_DB |
		      SOFT_RESET_PA |
		      SOFT_RESET_SC |
		      SOFT_RESET_SPI |
		      SOFT_RESET_SH |
		      SOFT_RESET_SX |
		      SOFT_RESET_TC |
		      SOFT_RESET_TA |
		      SOFT_RESET_VC |
		      SOFT_RESET_VGT);

	dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
	WREG32(GRBM_SOFT_RESET, grbm_reset);
	/* read back to post the write, then let the reset settle */
	(void)RREG32(GRBM_SOFT_RESET);
	udelay(50);
	WREG32(GRBM_SOFT_RESET, 0);
	(void)RREG32(GRBM_SOFT_RESET);

	/* reset all the system blocks */
	srbm_reset = SRBM_SOFT_RESET_ALL_MASK;

	dev_info(rdev->dev, "  SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
	WREG32(SRBM_SOFT_RESET, srbm_reset);
	(void)RREG32(SRBM_SOFT_RESET);
	udelay(50);
	WREG32(SRBM_SOFT_RESET, 0);
	(void)RREG32(SRBM_SOFT_RESET);
	/* Wait a little for things to settle down */
	udelay(50);
	dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
		RREG32(SRBM_STATUS));
	/* After reset we need to reinit the asic as GPU often endup in an
	 * incoherent state.
	 */
	atom_asic_init(rdev->mode_info.atom_context);
	evergreen_mc_resume(rdev, &save);
	return 0;
}
1131
/*
 * evergreen_asic_reset - asic-level reset entry point.
 *
 * The soft reset path is the only reset implemented for evergreen,
 * so simply delegate to it and propagate its result.
 */
int evergreen_asic_reset(struct radeon_device *rdev)
{
	int ret;

	ret = evergreen_gpu_soft_reset(rdev);
	return ret;
}
1136
/*
 * evergreen_startup - bring the hw up to a usable state.
 *
 * Programs the MC, enables AGP or the PCIE GART depending on the board
 * flags, and runs the gfx-pipe setup.  Firmware loading, the blitter,
 * IRQs, the CP ring and writeback are still compiled out (#if 0) —
 * acceleration is not yet enabled on evergreen.
 *
 * Returns 0 on success or a negative error code from GART setup.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	int r;

#if 0
	/* CP/PFP/RLC microcode loading — not hooked up yet. */
	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}
#endif
	evergreen_mc_program(rdev);
	/* System aperture access: AGP when available, PCIE GART otherwise. */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);
#if 0
	/* Blitter, IRQ, CP ring and writeback bring-up — disabled until
	 * acceleration is supported on evergreen. */
	if (!rdev->r600_blit.shader_obj) {
		r = r600_blit_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed blitter (%d).\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
			  &rdev->r600_blit.shader_gpu_addr);
	radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	if (r) {
		DRM_ERROR("failed to pin blit object %d\n", r);
		return r;
	}

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	r600_irq_set(rdev);

	r = radeon_ring_init(rdev, rdev->cp.ring_size);
	if (r)
		return r;
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = r600_cp_resume(rdev);
	if (r)
		return r;
	/* write back buffer are not vital so don't worry about failure */
	r600_wb_enable(rdev);
#endif
	return 0;
}
1202
1203int evergreen_resume(struct radeon_device *rdev)
1204{
1205 int r;
1206
1207 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
1208 * posting will perform necessary task to bring back GPU into good
1209 * shape.
1210 */
1211 /* post card */
1212 atom_asic_init(rdev->mode_info.atom_context);
1213 /* Initialize clocks */
1214 r = radeon_clocks_init(rdev);
1215 if (r) {
1216 return r;
1217 }
1218
1219 r = evergreen_startup(rdev);
1220 if (r) {
1221 DRM_ERROR("r600 startup failed on resume\n");
1222 return r;
1223 }
1224#if 0
1225 r = r600_ib_test(rdev);
1226 if (r) {
1227 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
1228 return r;
1229 }
1230#endif
1231 return r;
1232
1233}
1234
/*
 * evergreen_suspend - quiesce the hw before suspend.
 *
 * Disables the PCIE GART.  CP stop, writeback disable and blit-shader
 * unpinning are compiled out (#if 0) to mirror the disabled
 * acceleration setup in evergreen_startup().
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
	int r;

	/* FIXME: we should wait for ring to be empty */
	r700_cp_stop(rdev);
	rdev->cp.ready = false;
	r600_wb_disable(rdev);
#endif

	evergreen_pcie_gart_disable(rdev);
#if 0
	/* unpin shaders bo */
	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (likely(r == 0)) {
		radeon_bo_unpin(rdev->r600_blit.shader_obj);
		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	}
#endif
	return 0;
}
1257
1258static bool evergreen_card_posted(struct radeon_device *rdev)
1259{
1260 u32 reg;
1261
1262 /* first check CRTCs */
1263 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
1264 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
1265 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
1266 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
1267 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
1268 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
1269 if (reg & EVERGREEN_CRTC_MASTER_EN)
1270 return true;
1271
1272 /* then check MEM_SIZE, in case the crtcs are off */
1273 if (RREG32(CONFIG_MEMSIZE))
1274 return true;
1275
1276 return false;
1277}
1278
1279/* Plan is to move initialization in that function and use
1280 * helper function so that radeon_device_init pretty much
1281 * do nothing more than calling asic specific function. This
1282 * should also allow to remove a bunch of callback function
1283 * like vram_info.
1284 */
/*
 * evergreen_init - one-time asic initialization.
 *
 * Performs the full driver bring-up sequence: dummy page, GEM, BIOS
 * fetch (atombios required), optional posting, scratch/surface
 * registers, clocks, power management, fences, AGP, MC, the buffer
 * manager, GART init, and finally evergreen_startup().  IRQ/ring setup
 * is compiled out while acceleration is unsupported.
 *
 * Returns 0 on success or a negative error code.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	r = radeon_dummy_page_init(rdev);
	if (r)
		return r;
	/* This don't do much */
	r = radeon_gem_init(rdev);
	if (r)
		return r;
	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* Post card if necessary */
	if (!evergreen_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	r = radeon_clocks_init(rdev);
	if (r)
		return r;
	/* Initialize power management */
	radeon_pm_init(rdev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		/* AGP init failure is non-fatal: fall back to PCI(E). */
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;
#if 0
	/* IRQ and ring setup — disabled until acceleration works. */
	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	rdev->cp.ring_obj = NULL;
	r600_ring_init(rdev, 1024 * 1024);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);
#endif
	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = false;
	r = evergreen_startup(rdev);
	if (r) {
		evergreen_suspend(rdev);
		/*r600_wb_fini(rdev);*/
		/*radeon_ring_fini(rdev);*/
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}
	/* NOTE(review): accel_working is never set true above, so this
	 * IB-pool/IB-test branch is currently dead — presumably kept for
	 * when acceleration is enabled; confirm before relying on it. */
	if (rdev->accel_working) {
		r = radeon_ib_pool_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
			rdev->accel_working = false;
		}
		r = r600_ib_test(rdev);
		if (r) {
			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
			rdev->accel_working = false;
		}
	}
	return 0;
}
1385
1386void evergreen_fini(struct radeon_device *rdev)
1387{
Alex Deucher29fb52c2010-03-11 10:01:17 -05001388 radeon_pm_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001389 evergreen_suspend(rdev);
1390#if 0
1391 r600_blit_fini(rdev);
1392 r600_irq_fini(rdev);
1393 radeon_irq_kms_fini(rdev);
1394 radeon_ring_fini(rdev);
1395 r600_wb_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001396#endif
Alex Deucher0fcdb612010-03-24 13:20:41 -04001397 evergreen_pcie_gart_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001398 radeon_gem_fini(rdev);
1399 radeon_fence_driver_fini(rdev);
1400 radeon_clocks_fini(rdev);
1401 radeon_agp_fini(rdev);
1402 radeon_bo_fini(rdev);
1403 radeon_atombios_fini(rdev);
1404 kfree(rdev->bios);
1405 rdev->bios = NULL;
1406 radeon_dummy_page_fini(rdev);
1407}