Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 1 | /* |
| 2 | * Copyright 2010 Advanced Micro Devices, Inc. |
| 3 | * |
| 4 | * Permission is hereby granted, free of charge, to any person obtaining a |
| 5 | * copy of this software and associated documentation files (the "Software"), |
| 6 | * to deal in the Software without restriction, including without limitation |
| 7 | * the rights to use, copy, modify, merge, publish, distribute, sublicense, |
| 8 | * and/or sell copies of the Software, and to permit persons to whom the |
| 9 | * Software is furnished to do so, subject to the following conditions: |
| 10 | * |
| 11 | * The above copyright notice and this permission notice shall be included in |
| 12 | * all copies or substantial portions of the Software. |
| 13 | * |
| 14 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 15 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 16 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL |
| 17 | * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR |
| 18 | * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
| 19 | * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
| 20 | * OTHER DEALINGS IN THE SOFTWARE. |
| 21 | * |
| 22 | * Authors: Alex Deucher |
| 23 | */ |
| 24 | #include <linux/firmware.h> |
| 25 | #include <linux/platform_device.h> |
| 26 | #include "drmP.h" |
| 27 | #include "radeon.h" |
Daniel Vetter | e699037 | 2010-03-11 21:19:17 +0000 | [diff] [blame] | 28 | #include "radeon_asic.h" |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 29 | #include "radeon_drm.h" |
Alex Deucher | 0fcdb61 | 2010-03-24 13:20:41 -0400 | [diff] [blame] | 30 | #include "evergreend.h" |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 31 | #include "atom.h" |
| 32 | #include "avivod.h" |
| 33 | #include "evergreen_reg.h" |
| 34 | |
| 35 | static void evergreen_gpu_init(struct radeon_device *rdev); |
| 36 | void evergreen_fini(struct radeon_device *rdev); |
| 37 | |
/*
 * Report whether a display is physically connected on the given hotplug
 * detect (HPD) pin.  Not implemented for evergreen yet: always reports
 * "not connected".
 */
bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;
	/* XXX: read the HPD sense state for this pin once implemented */
	return connected;
}
| 44 | |
/*
 * Set the interrupt polarity of an HPD pin (so an interrupt fires on the
 * connect/disconnect edge of interest).  Not implemented for evergreen yet.
 */
void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	/* XXX */
}
| 50 | |
/*
 * Set up hotplug-detect hardware for all connectors.  Not implemented for
 * evergreen yet.
 */
void evergreen_hpd_init(struct radeon_device *rdev)
{
	/* XXX */
}
| 55 | |
| 56 | |
/*
 * Recompute display watermarks / memory-bandwidth settings after a mode
 * change.  Not implemented for evergreen yet.
 */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}
| 61 | |
/*
 * Tear down hotplug-detect hardware state.  Not implemented for evergreen
 * yet (counterpart of evergreen_hpd_init).
 */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	/* XXX */
}
| 66 | |
/*
 * Poll the memory controller until it reports idle.
 *
 * Returns 0 once SRBM_STATUS bits 12:8 (mask 0x1F00) all clear, or -1 if
 * they are still set after rdev->usec_timeout microseconds.
 */
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* MC busy bits live in SRBM_STATUS[12:8] */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}
| 81 | |
| 82 | /* |
| 83 | * GART |
| 84 | */ |
Alex Deucher | 0fcdb61 | 2010-03-24 13:20:41 -0400 | [diff] [blame] | 85 | void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev) |
| 86 | { |
| 87 | unsigned i; |
| 88 | u32 tmp; |
| 89 | |
| 90 | WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1)); |
| 91 | for (i = 0; i < rdev->usec_timeout; i++) { |
| 92 | /* read MC_STATUS */ |
| 93 | tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE); |
| 94 | tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT; |
| 95 | if (tmp == 2) { |
| 96 | printk(KERN_WARNING "[drm] r600 flush TLB failed\n"); |
| 97 | return; |
| 98 | } |
| 99 | if (tmp) { |
| 100 | return; |
| 101 | } |
| 102 | udelay(1); |
| 103 | } |
| 104 | } |
| 105 | |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 106 | int evergreen_pcie_gart_enable(struct radeon_device *rdev) |
| 107 | { |
| 108 | u32 tmp; |
Alex Deucher | 0fcdb61 | 2010-03-24 13:20:41 -0400 | [diff] [blame] | 109 | int r; |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 110 | |
| 111 | if (rdev->gart.table.vram.robj == NULL) { |
| 112 | dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); |
| 113 | return -EINVAL; |
| 114 | } |
| 115 | r = radeon_gart_table_vram_pin(rdev); |
| 116 | if (r) |
| 117 | return r; |
Dave Airlie | 8256856 | 2010-02-05 16:00:07 +1000 | [diff] [blame] | 118 | radeon_gart_restore(rdev); |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 119 | /* Setup L2 cache */ |
| 120 | WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING | |
| 121 | ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE | |
| 122 | EFFECTIVE_L2_QUEUE_SIZE(7)); |
| 123 | WREG32(VM_L2_CNTL2, 0); |
| 124 | WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2)); |
| 125 | /* Setup TLB control */ |
| 126 | tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING | |
| 127 | SYSTEM_ACCESS_MODE_NOT_IN_SYS | |
| 128 | SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU | |
| 129 | EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5); |
| 130 | WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp); |
| 131 | WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp); |
| 132 | WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp); |
| 133 | WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp); |
| 134 | WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp); |
| 135 | WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp); |
| 136 | WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp); |
| 137 | WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); |
| 138 | WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); |
| 139 | WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); |
| 140 | WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) | |
| 141 | RANGE_PROTECTION_FAULT_ENABLE_DEFAULT); |
| 142 | WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR, |
| 143 | (u32)(rdev->dummy_page.addr >> 12)); |
Alex Deucher | 0fcdb61 | 2010-03-24 13:20:41 -0400 | [diff] [blame] | 144 | WREG32(VM_CONTEXT1_CNTL, 0); |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 145 | |
Alex Deucher | 0fcdb61 | 2010-03-24 13:20:41 -0400 | [diff] [blame] | 146 | evergreen_pcie_gart_tlb_flush(rdev); |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 147 | rdev->gart.ready = true; |
| 148 | return 0; |
| 149 | } |
| 150 | |
/*
 * Disable the PCIE GART: turn off both VM contexts, put the L2/L1 TLB
 * blocks into a disabled configuration, and unpin/unmap the page-table
 * buffer object if it exists.
 */
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache (caching disabled, only queue sizes kept) */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: L1 TLBs disabled (no ENABLE_L1_TLB bit) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			/* unmap and unpin the page table; reservation failure
			 * just leaves it pinned (best effort) */
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}
| 183 | |
/*
 * Full GART teardown: disable the hardware, free the page-table VRAM
 * buffer, then release the common GART bookkeeping.
 */
void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
| 190 | |
| 191 | |
/*
 * Configure the VM blocks for AGP operation: L2/L1 TLBs are enabled the
 * same way as for PCIE GART, but both VM contexts are left disabled so no
 * page-table translation takes place.
 */
void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: same config for every MD/MB L1 TLB */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page-table translation in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
| 217 | |
| 218 | static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save) |
| 219 | { |
| 220 | save->vga_control[0] = RREG32(D1VGA_CONTROL); |
| 221 | save->vga_control[1] = RREG32(D2VGA_CONTROL); |
| 222 | save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL); |
| 223 | save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL); |
| 224 | save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL); |
| 225 | save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL); |
| 226 | save->vga_render_control = RREG32(VGA_RENDER_CONTROL); |
| 227 | save->vga_hdp_control = RREG32(VGA_HDP_CONTROL); |
| 228 | save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET); |
| 229 | save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET); |
| 230 | save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET); |
| 231 | save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET); |
| 232 | save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET); |
| 233 | save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET); |
| 234 | |
| 235 | /* Stop all video */ |
| 236 | WREG32(VGA_RENDER_CONTROL, 0); |
| 237 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1); |
| 238 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1); |
| 239 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1); |
| 240 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1); |
| 241 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1); |
| 242 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1); |
| 243 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0); |
| 244 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0); |
| 245 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0); |
| 246 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0); |
| 247 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0); |
| 248 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0); |
| 249 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0); |
| 250 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0); |
| 251 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0); |
| 252 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0); |
| 253 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0); |
| 254 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0); |
| 255 | |
| 256 | WREG32(D1VGA_CONTROL, 0); |
| 257 | WREG32(D2VGA_CONTROL, 0); |
| 258 | WREG32(EVERGREEN_D3VGA_CONTROL, 0); |
| 259 | WREG32(EVERGREEN_D4VGA_CONTROL, 0); |
| 260 | WREG32(EVERGREEN_D5VGA_CONTROL, 0); |
| 261 | WREG32(EVERGREEN_D6VGA_CONTROL, 0); |
| 262 | } |
| 263 | |
| 264 | static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save) |
| 265 | { |
| 266 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET, |
| 267 | upper_32_bits(rdev->mc.vram_start)); |
| 268 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET, |
| 269 | upper_32_bits(rdev->mc.vram_start)); |
| 270 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET, |
| 271 | (u32)rdev->mc.vram_start); |
| 272 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET, |
| 273 | (u32)rdev->mc.vram_start); |
| 274 | |
| 275 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET, |
| 276 | upper_32_bits(rdev->mc.vram_start)); |
| 277 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET, |
| 278 | upper_32_bits(rdev->mc.vram_start)); |
| 279 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET, |
| 280 | (u32)rdev->mc.vram_start); |
| 281 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET, |
| 282 | (u32)rdev->mc.vram_start); |
| 283 | |
| 284 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET, |
| 285 | upper_32_bits(rdev->mc.vram_start)); |
| 286 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET, |
| 287 | upper_32_bits(rdev->mc.vram_start)); |
| 288 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET, |
| 289 | (u32)rdev->mc.vram_start); |
| 290 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET, |
| 291 | (u32)rdev->mc.vram_start); |
| 292 | |
| 293 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET, |
| 294 | upper_32_bits(rdev->mc.vram_start)); |
| 295 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET, |
| 296 | upper_32_bits(rdev->mc.vram_start)); |
| 297 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET, |
| 298 | (u32)rdev->mc.vram_start); |
| 299 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET, |
| 300 | (u32)rdev->mc.vram_start); |
| 301 | |
| 302 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET, |
| 303 | upper_32_bits(rdev->mc.vram_start)); |
| 304 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET, |
| 305 | upper_32_bits(rdev->mc.vram_start)); |
| 306 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET, |
| 307 | (u32)rdev->mc.vram_start); |
| 308 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET, |
| 309 | (u32)rdev->mc.vram_start); |
| 310 | |
| 311 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET, |
| 312 | upper_32_bits(rdev->mc.vram_start)); |
| 313 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET, |
| 314 | upper_32_bits(rdev->mc.vram_start)); |
| 315 | WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET, |
| 316 | (u32)rdev->mc.vram_start); |
| 317 | WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET, |
| 318 | (u32)rdev->mc.vram_start); |
| 319 | |
| 320 | WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start)); |
| 321 | WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start); |
| 322 | /* Unlock host access */ |
| 323 | WREG32(VGA_HDP_CONTROL, save->vga_hdp_control); |
| 324 | mdelay(1); |
| 325 | /* Restore video state */ |
| 326 | WREG32(D1VGA_CONTROL, save->vga_control[0]); |
| 327 | WREG32(D2VGA_CONTROL, save->vga_control[1]); |
| 328 | WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]); |
| 329 | WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]); |
| 330 | WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]); |
| 331 | WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]); |
| 332 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1); |
| 333 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1); |
| 334 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1); |
| 335 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1); |
| 336 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1); |
| 337 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1); |
| 338 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]); |
| 339 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]); |
| 340 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]); |
| 341 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]); |
| 342 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]); |
| 343 | WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]); |
| 344 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0); |
| 345 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0); |
| 346 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0); |
| 347 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0); |
| 348 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0); |
| 349 | WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0); |
| 350 | WREG32(VGA_RENDER_CONTROL, save->vga_render_control); |
| 351 | } |
| 352 | |
/*
 * Program the memory controller's view of VRAM and the system/AGP
 * apertures.  The display engine is stopped around the reprogramming
 * (evergreen_mc_stop/resume) because moving the FB location while
 * scanning out would show garbage.
 */
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration: the system aperture must cover both the
	 * VRAM and GTT ranges, whichever order they are laid out in */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	/* FB location is packed as end[15:0]<<16 | start[15:0], in 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP effectively disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
| 420 | |
| 421 | #if 0 |
| 422 | /* |
| 423 | * CP. |
| 424 | */ |
/* Halt the command processor.  Not implemented yet (compiled out above). */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
	/* XXX */
}
| 429 | |
| 430 | |
/*
 * Upload PFP/ME microcode to the command processor.  Not implemented yet
 * (compiled out above); always reports success.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	/* XXX */

	return 0;
}
| 437 | |
| 438 | |
| 439 | /* |
| 440 | * Core functions |
| 441 | */ |
/*
 * Compute the tile-pipe to render-backend mapping used during GPU init.
 * Not implemented yet (compiled out above); returns an empty map.
 */
static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;

	return backend_map;
}
| 450 | #endif |
| 451 | |
/*
 * Program the 3D engine's static configuration (tiling, backend setup,
 * etc.).  Not implemented for evergreen yet.
 */
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	/* XXX */
}
| 456 | |
/*
 * Discover VRAM properties (bus width, size, apertures) and lay out the
 * VRAM/GTT address ranges.  Always returns 0.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information: channel size in bits */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* number of memory channels */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	/* FIXME remove this once we support unmappable VRAM */
	if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
		/* clamp usable VRAM to what the CPU aperture can reach */
		rdev->mc.mc_vram_size = rdev->mc.aper_size;
		rdev->mc.real_vram_size = rdev->mc.aper_size;
	}
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
Jerome Glisse | d594e46 | 2010-02-17 21:54:29 +0000 | [diff] [blame] | 507 | |
Jerome Glisse | 225758d | 2010-03-09 14:45:10 +0000 | [diff] [blame] | 508 | bool evergreen_gpu_is_lockup(struct radeon_device *rdev) |
| 509 | { |
| 510 | /* FIXME: implement for evergreen */ |
| 511 | return false; |
| 512 | } |
| 513 | |
Alex Deucher | 747943e | 2010-03-24 13:26:36 -0400 | [diff] [blame^] | 514 | static int evergreen_gpu_soft_reset(struct radeon_device *rdev) |
| 515 | { |
| 516 | struct evergreen_mc_save save; |
| 517 | u32 srbm_reset = 0; |
| 518 | u32 grbm_reset = 0; |
| 519 | |
| 520 | dev_info(rdev->dev, "GPU softreset \n"); |
| 521 | dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n", |
| 522 | RREG32(GRBM_STATUS)); |
| 523 | dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n", |
| 524 | RREG32(GRBM_STATUS_SE0)); |
| 525 | dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n", |
| 526 | RREG32(GRBM_STATUS_SE1)); |
| 527 | dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n", |
| 528 | RREG32(SRBM_STATUS)); |
| 529 | evergreen_mc_stop(rdev, &save); |
| 530 | if (evergreen_mc_wait_for_idle(rdev)) { |
| 531 | dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); |
| 532 | } |
| 533 | /* Disable CP parsing/prefetching */ |
| 534 | WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT); |
| 535 | |
| 536 | /* reset all the gfx blocks */ |
| 537 | grbm_reset = (SOFT_RESET_CP | |
| 538 | SOFT_RESET_CB | |
| 539 | SOFT_RESET_DB | |
| 540 | SOFT_RESET_PA | |
| 541 | SOFT_RESET_SC | |
| 542 | SOFT_RESET_SPI | |
| 543 | SOFT_RESET_SH | |
| 544 | SOFT_RESET_SX | |
| 545 | SOFT_RESET_TC | |
| 546 | SOFT_RESET_TA | |
| 547 | SOFT_RESET_VC | |
| 548 | SOFT_RESET_VGT); |
| 549 | |
| 550 | dev_info(rdev->dev, " GRBM_SOFT_RESET=0x%08X\n", grbm_reset); |
| 551 | WREG32(GRBM_SOFT_RESET, grbm_reset); |
| 552 | (void)RREG32(GRBM_SOFT_RESET); |
| 553 | udelay(50); |
| 554 | WREG32(GRBM_SOFT_RESET, 0); |
| 555 | (void)RREG32(GRBM_SOFT_RESET); |
| 556 | |
| 557 | /* reset all the system blocks */ |
| 558 | srbm_reset = SRBM_SOFT_RESET_ALL_MASK; |
| 559 | |
| 560 | dev_info(rdev->dev, " SRBM_SOFT_RESET=0x%08X\n", srbm_reset); |
| 561 | WREG32(SRBM_SOFT_RESET, srbm_reset); |
| 562 | (void)RREG32(SRBM_SOFT_RESET); |
| 563 | udelay(50); |
| 564 | WREG32(SRBM_SOFT_RESET, 0); |
| 565 | (void)RREG32(SRBM_SOFT_RESET); |
| 566 | /* Wait a little for things to settle down */ |
| 567 | udelay(50); |
| 568 | dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n", |
| 569 | RREG32(GRBM_STATUS)); |
| 570 | dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n", |
| 571 | RREG32(GRBM_STATUS_SE0)); |
| 572 | dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n", |
| 573 | RREG32(GRBM_STATUS_SE1)); |
| 574 | dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n", |
| 575 | RREG32(SRBM_STATUS)); |
| 576 | /* After reset we need to reinit the asic as GPU often endup in an |
| 577 | * incoherent state. |
| 578 | */ |
| 579 | atom_asic_init(rdev->mode_info.atom_context); |
| 580 | evergreen_mc_resume(rdev, &save); |
| 581 | return 0; |
| 582 | } |
| 583 | |
Jerome Glisse | a2d07b7 | 2010-03-09 14:45:11 +0000 | [diff] [blame] | 584 | int evergreen_asic_reset(struct radeon_device *rdev) |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 585 | { |
Alex Deucher | 747943e | 2010-03-24 13:26:36 -0400 | [diff] [blame^] | 586 | return evergreen_gpu_soft_reset(rdev); |
Alex Deucher | bcc1c2a | 2010-01-12 17:54:34 -0500 | [diff] [blame] | 587 | } |
| 588 | |
/*
 * Bring the GPU up: program the memory controller, enable the address
 * translation path (AGP or PCIE GART), and run the core init.  The CP,
 * IRQ, blitter and writeback setup is still compiled out (#if 0) while
 * acceleration support for evergreen is incomplete.
 *
 * Returns 0 on success or a negative error code.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	int r;

#if 0
	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}
#endif
	evergreen_mc_program(rdev);
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);
#if 0
	if (!rdev->r600_blit.shader_obj) {
		r = r600_blit_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed blitter (%d).\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
			&rdev->r600_blit.shader_gpu_addr);
	radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	if (r) {
		DRM_ERROR("failed to pin blit object %d\n", r);
		return r;
	}

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	r600_irq_set(rdev);

	r = radeon_ring_init(rdev, rdev->cp.ring_size);
	if (r)
		return r;
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = r600_cp_resume(rdev);
	if (r)
		return r;
	/* write back buffer are not vital so don't worry about failure */
	r600_wb_enable(rdev);
#endif
	return 0;
}
| 654 | |
/*
 * Resume from suspend: re-post the card via its ATOM init tables,
 * reinitialize clocks, and rerun the startup sequence.
 *
 * Returns 0 on success or a negative error code.
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);
	/* Initialize clocks */
	r = radeon_clocks_init(rdev);
	if (r) {
		return r;
	}

	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("r600 startup failed on resume\n");
		return r;
	}
#if 0
	r = r600_ib_test(rdev);
	if (r) {
		DRM_ERROR("radeon: failled testing IB (%d).\n", r);
		return r;
	}
#endif
	return r;

}
| 686 | |
/*
 * Suspend the GPU: disable the PCIE GART.  CP/writeback shutdown and the
 * blit-shader unpin are still compiled out (#if 0) while acceleration
 * support for evergreen is incomplete.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
	int r;

	/* FIXME: we should wait for ring to be empty */
	r700_cp_stop(rdev);
	rdev->cp.ready = false;
	r600_wb_disable(rdev);
#endif

	evergreen_pcie_gart_disable(rdev);
#if 0
	/* unpin shaders bo */
	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (likely(r == 0)) {
		radeon_bo_unpin(rdev->r600_blit.shader_obj);
		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	}
#endif
	return 0;
}
| 709 | |
| 710 | static bool evergreen_card_posted(struct radeon_device *rdev) |
| 711 | { |
| 712 | u32 reg; |
| 713 | |
| 714 | /* first check CRTCs */ |
| 715 | reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) | |
| 716 | RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) | |
| 717 | RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) | |
| 718 | RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) | |
| 719 | RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) | |
| 720 | RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET); |
| 721 | if (reg & EVERGREEN_CRTC_MASTER_EN) |
| 722 | return true; |
| 723 | |
| 724 | /* then check MEM_SIZE, in case the crtcs are off */ |
| 725 | if (RREG32(CONFIG_MEMSIZE)) |
| 726 | return true; |
| 727 | |
| 728 | return false; |
| 729 | } |
| 730 | |
/* Plan is to move initialization into this function and use
 * helper functions so that radeon_device_init pretty much
 * does nothing more than call asic-specific functions. This
 * should also allow removal of a bunch of callback functions
 * like vram_info.
 */
/*
 * One-time driver init for evergreen asics: BIOS fetch, card posting,
 * clocks, fences, memory controller, memory manager and GART.  The
 * ordering of these steps matters.  Returns 0 on success or a negative
 * error code; acceleration bring-up failures are non-fatal and only
 * clear rdev->accel_working.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* dummy page used as a safe fallback GART mapping */
	r = radeon_dummy_page_init(rdev);
	if (r)
		return r;
	/* This doesn't do much */
	r = radeon_gem_init(rdev);
	if (r)
		return r;
	/* Read BIOS; only fatal on AVIVO parts */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for R600 GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* Post card if necessary (run the ATOM asic init table) */
	if (!evergreen_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	r = radeon_clocks_init(rdev);
	if (r)
		return r;
	/* Initialize power management */
	radeon_pm_init(rdev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure is non-fatal, just disable AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;
#if 0
	/* IRQ, CP ring and IH ring setup are compiled out until
	 * acceleration support is enabled for evergreen.
	 */
	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	rdev->cp.ring_obj = NULL;
	r600_ring_init(rdev, 1024 * 1024);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);
#endif
	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = false;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failed: tear down what came up, but keep the
		 * driver loaded with acceleration disabled.
		 */
		evergreen_suspend(rdev);
		/*r600_wb_fini(rdev);*/
		/*radeon_ring_fini(rdev);*/
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}
	/* IB pool/test only when acceleration came up; failures here
	 * likewise only disable acceleration.
	 */
	if (rdev->accel_working) {
		r = radeon_ib_pool_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
			rdev->accel_working = false;
		}
		r = r600_ib_test(rdev);
		if (r) {
			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
			rdev->accel_working = false;
		}
	}
	return 0;
}
| 837 | |
/*
 * Driver teardown for evergreen asics: mirror of evergreen_init,
 * releasing resources in roughly reverse order of acquisition.
 * CP/IRQ/ring/writeback teardown is compiled out (#if 0) to match
 * the disabled acceleration bring-up.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	/* quiesce the hardware before tearing structures down */
	evergreen_suspend(rdev);
#if 0
	r600_blit_fini(rdev);
	r600_irq_fini(rdev);
	radeon_irq_kms_fini(rdev);
	radeon_ring_fini(rdev);
	r600_wb_fini(rdev);
#endif
	evergreen_pcie_gart_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_clocks_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	/* BIOS copy was kmalloc'd by radeon_get_bios(); release it */
	kfree(rdev->bios);
	rdev->bios = NULL;
	radeon_dummy_page_fini(rdev);
}