blob: 6b559cb5383788f79cdc62e200ea30829e08c5e9 [file] [log] [blame]
/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24#include <linux/firmware.h>
25#include <linux/platform_device.h>
Tejun Heo5a0e3ad2010-03-24 17:04:11 +090026#include <linux/slab.h>
David Howells760285e2012-10-02 18:01:07 +010027#include <drm/drmP.h>
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050028#include "radeon.h"
Daniel Vettere6990372010-03-11 21:19:17 +000029#include "radeon_asic.h"
David Howells760285e2012-10-02 18:01:07 +010030#include <drm/radeon_drm.h>
Alex Deucher0fcdb612010-03-24 13:20:41 -040031#include "evergreend.h"
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050032#include "atom.h"
33#include "avivod.h"
34#include "evergreen_reg.h"
Alex Deucher2281a372010-10-21 13:31:38 -040035#include "evergreen_blit_shaders.h"
Alex Deucher138e4e12013-01-11 15:33:13 -050036#include "radeon_ucode.h"
Alex Deucherfe251e22010-03-24 13:36:43 -040037
Alex Deucher4a159032012-08-15 17:13:53 -040038static const u32 crtc_offsets[6] =
39{
40 EVERGREEN_CRTC0_REGISTER_OFFSET,
41 EVERGREEN_CRTC1_REGISTER_OFFSET,
42 EVERGREEN_CRTC2_REGISTER_OFFSET,
43 EVERGREEN_CRTC3_REGISTER_OFFSET,
44 EVERGREEN_CRTC4_REGISTER_OFFSET,
45 EVERGREEN_CRTC5_REGISTER_OFFSET
46};
47
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050048static void evergreen_gpu_init(struct radeon_device *rdev);
49void evergreen_fini(struct radeon_device *rdev);
Ilija Hadzicb07759b2011-09-20 10:22:58 -040050void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
Alex Deucher1b370782011-11-17 20:13:28 -050051extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
52 int ring, u32 cp_int_cntl);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050053
Alex Deucherd4788db2013-02-28 14:40:09 -050054static const u32 evergreen_golden_registers[] =
55{
56 0x3f90, 0xffff0000, 0xff000000,
57 0x9148, 0xffff0000, 0xff000000,
58 0x3f94, 0xffff0000, 0xff000000,
59 0x914c, 0xffff0000, 0xff000000,
60 0x9b7c, 0xffffffff, 0x00000000,
61 0x8a14, 0xffffffff, 0x00000007,
62 0x8b10, 0xffffffff, 0x00000000,
63 0x960c, 0xffffffff, 0x54763210,
64 0x88c4, 0xffffffff, 0x000000c2,
65 0x88d4, 0xffffffff, 0x00000010,
66 0x8974, 0xffffffff, 0x00000000,
67 0xc78, 0x00000080, 0x00000080,
68 0x5eb4, 0xffffffff, 0x00000002,
69 0x5e78, 0xffffffff, 0x001000f0,
70 0x6104, 0x01000300, 0x00000000,
71 0x5bc0, 0x00300000, 0x00000000,
72 0x7030, 0xffffffff, 0x00000011,
73 0x7c30, 0xffffffff, 0x00000011,
74 0x10830, 0xffffffff, 0x00000011,
75 0x11430, 0xffffffff, 0x00000011,
76 0x12030, 0xffffffff, 0x00000011,
77 0x12c30, 0xffffffff, 0x00000011,
78 0xd02c, 0xffffffff, 0x08421000,
79 0x240c, 0xffffffff, 0x00000380,
80 0x8b24, 0xffffffff, 0x00ff0fff,
81 0x28a4c, 0x06000000, 0x06000000,
82 0x10c, 0x00000001, 0x00000001,
83 0x8d00, 0xffffffff, 0x100e4848,
84 0x8d04, 0xffffffff, 0x00164745,
85 0x8c00, 0xffffffff, 0xe4000003,
86 0x8c04, 0xffffffff, 0x40600060,
87 0x8c08, 0xffffffff, 0x001c001c,
88 0x8cf0, 0xffffffff, 0x08e00620,
89 0x8c20, 0xffffffff, 0x00800080,
90 0x8c24, 0xffffffff, 0x00800080,
91 0x8c18, 0xffffffff, 0x20202078,
92 0x8c1c, 0xffffffff, 0x00001010,
93 0x28350, 0xffffffff, 0x00000000,
94 0xa008, 0xffffffff, 0x00010000,
95 0x5cc, 0xffffffff, 0x00000001,
96 0x9508, 0xffffffff, 0x00000002,
97 0x913c, 0x0000000f, 0x0000000a
98};
99
100static const u32 evergreen_golden_registers2[] =
101{
102 0x2f4c, 0xffffffff, 0x00000000,
103 0x54f4, 0xffffffff, 0x00000000,
104 0x54f0, 0xffffffff, 0x00000000,
105 0x5498, 0xffffffff, 0x00000000,
106 0x549c, 0xffffffff, 0x00000000,
107 0x5494, 0xffffffff, 0x00000000,
108 0x53cc, 0xffffffff, 0x00000000,
109 0x53c8, 0xffffffff, 0x00000000,
110 0x53c4, 0xffffffff, 0x00000000,
111 0x53c0, 0xffffffff, 0x00000000,
112 0x53bc, 0xffffffff, 0x00000000,
113 0x53b8, 0xffffffff, 0x00000000,
114 0x53b4, 0xffffffff, 0x00000000,
115 0x53b0, 0xffffffff, 0x00000000
116};
117
118static const u32 cypress_mgcg_init[] =
119{
120 0x802c, 0xffffffff, 0xc0000000,
121 0x5448, 0xffffffff, 0x00000100,
122 0x55e4, 0xffffffff, 0x00000100,
123 0x160c, 0xffffffff, 0x00000100,
124 0x5644, 0xffffffff, 0x00000100,
125 0xc164, 0xffffffff, 0x00000100,
126 0x8a18, 0xffffffff, 0x00000100,
127 0x897c, 0xffffffff, 0x06000100,
128 0x8b28, 0xffffffff, 0x00000100,
129 0x9144, 0xffffffff, 0x00000100,
130 0x9a60, 0xffffffff, 0x00000100,
131 0x9868, 0xffffffff, 0x00000100,
132 0x8d58, 0xffffffff, 0x00000100,
133 0x9510, 0xffffffff, 0x00000100,
134 0x949c, 0xffffffff, 0x00000100,
135 0x9654, 0xffffffff, 0x00000100,
136 0x9030, 0xffffffff, 0x00000100,
137 0x9034, 0xffffffff, 0x00000100,
138 0x9038, 0xffffffff, 0x00000100,
139 0x903c, 0xffffffff, 0x00000100,
140 0x9040, 0xffffffff, 0x00000100,
141 0xa200, 0xffffffff, 0x00000100,
142 0xa204, 0xffffffff, 0x00000100,
143 0xa208, 0xffffffff, 0x00000100,
144 0xa20c, 0xffffffff, 0x00000100,
145 0x971c, 0xffffffff, 0x00000100,
146 0x977c, 0xffffffff, 0x00000100,
147 0x3f80, 0xffffffff, 0x00000100,
148 0xa210, 0xffffffff, 0x00000100,
149 0xa214, 0xffffffff, 0x00000100,
150 0x4d8, 0xffffffff, 0x00000100,
151 0x9784, 0xffffffff, 0x00000100,
152 0x9698, 0xffffffff, 0x00000100,
153 0x4d4, 0xffffffff, 0x00000200,
154 0x30cc, 0xffffffff, 0x00000100,
155 0xd0c0, 0xffffffff, 0xff000100,
156 0x802c, 0xffffffff, 0x40000000,
157 0x915c, 0xffffffff, 0x00010000,
158 0x9160, 0xffffffff, 0x00030002,
159 0x9178, 0xffffffff, 0x00070000,
160 0x917c, 0xffffffff, 0x00030002,
161 0x9180, 0xffffffff, 0x00050004,
162 0x918c, 0xffffffff, 0x00010006,
163 0x9190, 0xffffffff, 0x00090008,
164 0x9194, 0xffffffff, 0x00070000,
165 0x9198, 0xffffffff, 0x00030002,
166 0x919c, 0xffffffff, 0x00050004,
167 0x91a8, 0xffffffff, 0x00010006,
168 0x91ac, 0xffffffff, 0x00090008,
169 0x91b0, 0xffffffff, 0x00070000,
170 0x91b4, 0xffffffff, 0x00030002,
171 0x91b8, 0xffffffff, 0x00050004,
172 0x91c4, 0xffffffff, 0x00010006,
173 0x91c8, 0xffffffff, 0x00090008,
174 0x91cc, 0xffffffff, 0x00070000,
175 0x91d0, 0xffffffff, 0x00030002,
176 0x91d4, 0xffffffff, 0x00050004,
177 0x91e0, 0xffffffff, 0x00010006,
178 0x91e4, 0xffffffff, 0x00090008,
179 0x91e8, 0xffffffff, 0x00000000,
180 0x91ec, 0xffffffff, 0x00070000,
181 0x91f0, 0xffffffff, 0x00030002,
182 0x91f4, 0xffffffff, 0x00050004,
183 0x9200, 0xffffffff, 0x00010006,
184 0x9204, 0xffffffff, 0x00090008,
185 0x9208, 0xffffffff, 0x00070000,
186 0x920c, 0xffffffff, 0x00030002,
187 0x9210, 0xffffffff, 0x00050004,
188 0x921c, 0xffffffff, 0x00010006,
189 0x9220, 0xffffffff, 0x00090008,
190 0x9224, 0xffffffff, 0x00070000,
191 0x9228, 0xffffffff, 0x00030002,
192 0x922c, 0xffffffff, 0x00050004,
193 0x9238, 0xffffffff, 0x00010006,
194 0x923c, 0xffffffff, 0x00090008,
195 0x9240, 0xffffffff, 0x00070000,
196 0x9244, 0xffffffff, 0x00030002,
197 0x9248, 0xffffffff, 0x00050004,
198 0x9254, 0xffffffff, 0x00010006,
199 0x9258, 0xffffffff, 0x00090008,
200 0x925c, 0xffffffff, 0x00070000,
201 0x9260, 0xffffffff, 0x00030002,
202 0x9264, 0xffffffff, 0x00050004,
203 0x9270, 0xffffffff, 0x00010006,
204 0x9274, 0xffffffff, 0x00090008,
205 0x9278, 0xffffffff, 0x00070000,
206 0x927c, 0xffffffff, 0x00030002,
207 0x9280, 0xffffffff, 0x00050004,
208 0x928c, 0xffffffff, 0x00010006,
209 0x9290, 0xffffffff, 0x00090008,
210 0x9294, 0xffffffff, 0x00000000,
211 0x929c, 0xffffffff, 0x00000001,
212 0x802c, 0xffffffff, 0x40010000,
213 0x915c, 0xffffffff, 0x00010000,
214 0x9160, 0xffffffff, 0x00030002,
215 0x9178, 0xffffffff, 0x00070000,
216 0x917c, 0xffffffff, 0x00030002,
217 0x9180, 0xffffffff, 0x00050004,
218 0x918c, 0xffffffff, 0x00010006,
219 0x9190, 0xffffffff, 0x00090008,
220 0x9194, 0xffffffff, 0x00070000,
221 0x9198, 0xffffffff, 0x00030002,
222 0x919c, 0xffffffff, 0x00050004,
223 0x91a8, 0xffffffff, 0x00010006,
224 0x91ac, 0xffffffff, 0x00090008,
225 0x91b0, 0xffffffff, 0x00070000,
226 0x91b4, 0xffffffff, 0x00030002,
227 0x91b8, 0xffffffff, 0x00050004,
228 0x91c4, 0xffffffff, 0x00010006,
229 0x91c8, 0xffffffff, 0x00090008,
230 0x91cc, 0xffffffff, 0x00070000,
231 0x91d0, 0xffffffff, 0x00030002,
232 0x91d4, 0xffffffff, 0x00050004,
233 0x91e0, 0xffffffff, 0x00010006,
234 0x91e4, 0xffffffff, 0x00090008,
235 0x91e8, 0xffffffff, 0x00000000,
236 0x91ec, 0xffffffff, 0x00070000,
237 0x91f0, 0xffffffff, 0x00030002,
238 0x91f4, 0xffffffff, 0x00050004,
239 0x9200, 0xffffffff, 0x00010006,
240 0x9204, 0xffffffff, 0x00090008,
241 0x9208, 0xffffffff, 0x00070000,
242 0x920c, 0xffffffff, 0x00030002,
243 0x9210, 0xffffffff, 0x00050004,
244 0x921c, 0xffffffff, 0x00010006,
245 0x9220, 0xffffffff, 0x00090008,
246 0x9224, 0xffffffff, 0x00070000,
247 0x9228, 0xffffffff, 0x00030002,
248 0x922c, 0xffffffff, 0x00050004,
249 0x9238, 0xffffffff, 0x00010006,
250 0x923c, 0xffffffff, 0x00090008,
251 0x9240, 0xffffffff, 0x00070000,
252 0x9244, 0xffffffff, 0x00030002,
253 0x9248, 0xffffffff, 0x00050004,
254 0x9254, 0xffffffff, 0x00010006,
255 0x9258, 0xffffffff, 0x00090008,
256 0x925c, 0xffffffff, 0x00070000,
257 0x9260, 0xffffffff, 0x00030002,
258 0x9264, 0xffffffff, 0x00050004,
259 0x9270, 0xffffffff, 0x00010006,
260 0x9274, 0xffffffff, 0x00090008,
261 0x9278, 0xffffffff, 0x00070000,
262 0x927c, 0xffffffff, 0x00030002,
263 0x9280, 0xffffffff, 0x00050004,
264 0x928c, 0xffffffff, 0x00010006,
265 0x9290, 0xffffffff, 0x00090008,
266 0x9294, 0xffffffff, 0x00000000,
267 0x929c, 0xffffffff, 0x00000001,
268 0x802c, 0xffffffff, 0xc0000000
269};
270
271static const u32 redwood_mgcg_init[] =
272{
273 0x802c, 0xffffffff, 0xc0000000,
274 0x5448, 0xffffffff, 0x00000100,
275 0x55e4, 0xffffffff, 0x00000100,
276 0x160c, 0xffffffff, 0x00000100,
277 0x5644, 0xffffffff, 0x00000100,
278 0xc164, 0xffffffff, 0x00000100,
279 0x8a18, 0xffffffff, 0x00000100,
280 0x897c, 0xffffffff, 0x06000100,
281 0x8b28, 0xffffffff, 0x00000100,
282 0x9144, 0xffffffff, 0x00000100,
283 0x9a60, 0xffffffff, 0x00000100,
284 0x9868, 0xffffffff, 0x00000100,
285 0x8d58, 0xffffffff, 0x00000100,
286 0x9510, 0xffffffff, 0x00000100,
287 0x949c, 0xffffffff, 0x00000100,
288 0x9654, 0xffffffff, 0x00000100,
289 0x9030, 0xffffffff, 0x00000100,
290 0x9034, 0xffffffff, 0x00000100,
291 0x9038, 0xffffffff, 0x00000100,
292 0x903c, 0xffffffff, 0x00000100,
293 0x9040, 0xffffffff, 0x00000100,
294 0xa200, 0xffffffff, 0x00000100,
295 0xa204, 0xffffffff, 0x00000100,
296 0xa208, 0xffffffff, 0x00000100,
297 0xa20c, 0xffffffff, 0x00000100,
298 0x971c, 0xffffffff, 0x00000100,
299 0x977c, 0xffffffff, 0x00000100,
300 0x3f80, 0xffffffff, 0x00000100,
301 0xa210, 0xffffffff, 0x00000100,
302 0xa214, 0xffffffff, 0x00000100,
303 0x4d8, 0xffffffff, 0x00000100,
304 0x9784, 0xffffffff, 0x00000100,
305 0x9698, 0xffffffff, 0x00000100,
306 0x4d4, 0xffffffff, 0x00000200,
307 0x30cc, 0xffffffff, 0x00000100,
308 0xd0c0, 0xffffffff, 0xff000100,
309 0x802c, 0xffffffff, 0x40000000,
310 0x915c, 0xffffffff, 0x00010000,
311 0x9160, 0xffffffff, 0x00030002,
312 0x9178, 0xffffffff, 0x00070000,
313 0x917c, 0xffffffff, 0x00030002,
314 0x9180, 0xffffffff, 0x00050004,
315 0x918c, 0xffffffff, 0x00010006,
316 0x9190, 0xffffffff, 0x00090008,
317 0x9194, 0xffffffff, 0x00070000,
318 0x9198, 0xffffffff, 0x00030002,
319 0x919c, 0xffffffff, 0x00050004,
320 0x91a8, 0xffffffff, 0x00010006,
321 0x91ac, 0xffffffff, 0x00090008,
322 0x91b0, 0xffffffff, 0x00070000,
323 0x91b4, 0xffffffff, 0x00030002,
324 0x91b8, 0xffffffff, 0x00050004,
325 0x91c4, 0xffffffff, 0x00010006,
326 0x91c8, 0xffffffff, 0x00090008,
327 0x91cc, 0xffffffff, 0x00070000,
328 0x91d0, 0xffffffff, 0x00030002,
329 0x91d4, 0xffffffff, 0x00050004,
330 0x91e0, 0xffffffff, 0x00010006,
331 0x91e4, 0xffffffff, 0x00090008,
332 0x91e8, 0xffffffff, 0x00000000,
333 0x91ec, 0xffffffff, 0x00070000,
334 0x91f0, 0xffffffff, 0x00030002,
335 0x91f4, 0xffffffff, 0x00050004,
336 0x9200, 0xffffffff, 0x00010006,
337 0x9204, 0xffffffff, 0x00090008,
338 0x9294, 0xffffffff, 0x00000000,
339 0x929c, 0xffffffff, 0x00000001,
340 0x802c, 0xffffffff, 0xc0000000
341};
342
343static const u32 cedar_golden_registers[] =
344{
345 0x3f90, 0xffff0000, 0xff000000,
346 0x9148, 0xffff0000, 0xff000000,
347 0x3f94, 0xffff0000, 0xff000000,
348 0x914c, 0xffff0000, 0xff000000,
349 0x9b7c, 0xffffffff, 0x00000000,
350 0x8a14, 0xffffffff, 0x00000007,
351 0x8b10, 0xffffffff, 0x00000000,
352 0x960c, 0xffffffff, 0x54763210,
353 0x88c4, 0xffffffff, 0x000000c2,
354 0x88d4, 0xffffffff, 0x00000000,
355 0x8974, 0xffffffff, 0x00000000,
356 0xc78, 0x00000080, 0x00000080,
357 0x5eb4, 0xffffffff, 0x00000002,
358 0x5e78, 0xffffffff, 0x001000f0,
359 0x6104, 0x01000300, 0x00000000,
360 0x5bc0, 0x00300000, 0x00000000,
361 0x7030, 0xffffffff, 0x00000011,
362 0x7c30, 0xffffffff, 0x00000011,
363 0x10830, 0xffffffff, 0x00000011,
364 0x11430, 0xffffffff, 0x00000011,
365 0xd02c, 0xffffffff, 0x08421000,
366 0x240c, 0xffffffff, 0x00000380,
367 0x8b24, 0xffffffff, 0x00ff0fff,
368 0x28a4c, 0x06000000, 0x06000000,
369 0x10c, 0x00000001, 0x00000001,
370 0x8d00, 0xffffffff, 0x100e4848,
371 0x8d04, 0xffffffff, 0x00164745,
372 0x8c00, 0xffffffff, 0xe4000003,
373 0x8c04, 0xffffffff, 0x40600060,
374 0x8c08, 0xffffffff, 0x001c001c,
375 0x8cf0, 0xffffffff, 0x08e00410,
376 0x8c20, 0xffffffff, 0x00800080,
377 0x8c24, 0xffffffff, 0x00800080,
378 0x8c18, 0xffffffff, 0x20202078,
379 0x8c1c, 0xffffffff, 0x00001010,
380 0x28350, 0xffffffff, 0x00000000,
381 0xa008, 0xffffffff, 0x00010000,
382 0x5cc, 0xffffffff, 0x00000001,
383 0x9508, 0xffffffff, 0x00000002
384};
385
386static const u32 cedar_mgcg_init[] =
387{
388 0x802c, 0xffffffff, 0xc0000000,
389 0x5448, 0xffffffff, 0x00000100,
390 0x55e4, 0xffffffff, 0x00000100,
391 0x160c, 0xffffffff, 0x00000100,
392 0x5644, 0xffffffff, 0x00000100,
393 0xc164, 0xffffffff, 0x00000100,
394 0x8a18, 0xffffffff, 0x00000100,
395 0x897c, 0xffffffff, 0x06000100,
396 0x8b28, 0xffffffff, 0x00000100,
397 0x9144, 0xffffffff, 0x00000100,
398 0x9a60, 0xffffffff, 0x00000100,
399 0x9868, 0xffffffff, 0x00000100,
400 0x8d58, 0xffffffff, 0x00000100,
401 0x9510, 0xffffffff, 0x00000100,
402 0x949c, 0xffffffff, 0x00000100,
403 0x9654, 0xffffffff, 0x00000100,
404 0x9030, 0xffffffff, 0x00000100,
405 0x9034, 0xffffffff, 0x00000100,
406 0x9038, 0xffffffff, 0x00000100,
407 0x903c, 0xffffffff, 0x00000100,
408 0x9040, 0xffffffff, 0x00000100,
409 0xa200, 0xffffffff, 0x00000100,
410 0xa204, 0xffffffff, 0x00000100,
411 0xa208, 0xffffffff, 0x00000100,
412 0xa20c, 0xffffffff, 0x00000100,
413 0x971c, 0xffffffff, 0x00000100,
414 0x977c, 0xffffffff, 0x00000100,
415 0x3f80, 0xffffffff, 0x00000100,
416 0xa210, 0xffffffff, 0x00000100,
417 0xa214, 0xffffffff, 0x00000100,
418 0x4d8, 0xffffffff, 0x00000100,
419 0x9784, 0xffffffff, 0x00000100,
420 0x9698, 0xffffffff, 0x00000100,
421 0x4d4, 0xffffffff, 0x00000200,
422 0x30cc, 0xffffffff, 0x00000100,
423 0xd0c0, 0xffffffff, 0xff000100,
424 0x802c, 0xffffffff, 0x40000000,
425 0x915c, 0xffffffff, 0x00010000,
426 0x9178, 0xffffffff, 0x00050000,
427 0x917c, 0xffffffff, 0x00030002,
428 0x918c, 0xffffffff, 0x00010004,
429 0x9190, 0xffffffff, 0x00070006,
430 0x9194, 0xffffffff, 0x00050000,
431 0x9198, 0xffffffff, 0x00030002,
432 0x91a8, 0xffffffff, 0x00010004,
433 0x91ac, 0xffffffff, 0x00070006,
434 0x91e8, 0xffffffff, 0x00000000,
435 0x9294, 0xffffffff, 0x00000000,
436 0x929c, 0xffffffff, 0x00000001,
437 0x802c, 0xffffffff, 0xc0000000
438};
439
440static const u32 juniper_mgcg_init[] =
441{
442 0x802c, 0xffffffff, 0xc0000000,
443 0x5448, 0xffffffff, 0x00000100,
444 0x55e4, 0xffffffff, 0x00000100,
445 0x160c, 0xffffffff, 0x00000100,
446 0x5644, 0xffffffff, 0x00000100,
447 0xc164, 0xffffffff, 0x00000100,
448 0x8a18, 0xffffffff, 0x00000100,
449 0x897c, 0xffffffff, 0x06000100,
450 0x8b28, 0xffffffff, 0x00000100,
451 0x9144, 0xffffffff, 0x00000100,
452 0x9a60, 0xffffffff, 0x00000100,
453 0x9868, 0xffffffff, 0x00000100,
454 0x8d58, 0xffffffff, 0x00000100,
455 0x9510, 0xffffffff, 0x00000100,
456 0x949c, 0xffffffff, 0x00000100,
457 0x9654, 0xffffffff, 0x00000100,
458 0x9030, 0xffffffff, 0x00000100,
459 0x9034, 0xffffffff, 0x00000100,
460 0x9038, 0xffffffff, 0x00000100,
461 0x903c, 0xffffffff, 0x00000100,
462 0x9040, 0xffffffff, 0x00000100,
463 0xa200, 0xffffffff, 0x00000100,
464 0xa204, 0xffffffff, 0x00000100,
465 0xa208, 0xffffffff, 0x00000100,
466 0xa20c, 0xffffffff, 0x00000100,
467 0x971c, 0xffffffff, 0x00000100,
468 0xd0c0, 0xffffffff, 0xff000100,
469 0x802c, 0xffffffff, 0x40000000,
470 0x915c, 0xffffffff, 0x00010000,
471 0x9160, 0xffffffff, 0x00030002,
472 0x9178, 0xffffffff, 0x00070000,
473 0x917c, 0xffffffff, 0x00030002,
474 0x9180, 0xffffffff, 0x00050004,
475 0x918c, 0xffffffff, 0x00010006,
476 0x9190, 0xffffffff, 0x00090008,
477 0x9194, 0xffffffff, 0x00070000,
478 0x9198, 0xffffffff, 0x00030002,
479 0x919c, 0xffffffff, 0x00050004,
480 0x91a8, 0xffffffff, 0x00010006,
481 0x91ac, 0xffffffff, 0x00090008,
482 0x91b0, 0xffffffff, 0x00070000,
483 0x91b4, 0xffffffff, 0x00030002,
484 0x91b8, 0xffffffff, 0x00050004,
485 0x91c4, 0xffffffff, 0x00010006,
486 0x91c8, 0xffffffff, 0x00090008,
487 0x91cc, 0xffffffff, 0x00070000,
488 0x91d0, 0xffffffff, 0x00030002,
489 0x91d4, 0xffffffff, 0x00050004,
490 0x91e0, 0xffffffff, 0x00010006,
491 0x91e4, 0xffffffff, 0x00090008,
492 0x91e8, 0xffffffff, 0x00000000,
493 0x91ec, 0xffffffff, 0x00070000,
494 0x91f0, 0xffffffff, 0x00030002,
495 0x91f4, 0xffffffff, 0x00050004,
496 0x9200, 0xffffffff, 0x00010006,
497 0x9204, 0xffffffff, 0x00090008,
498 0x9208, 0xffffffff, 0x00070000,
499 0x920c, 0xffffffff, 0x00030002,
500 0x9210, 0xffffffff, 0x00050004,
501 0x921c, 0xffffffff, 0x00010006,
502 0x9220, 0xffffffff, 0x00090008,
503 0x9224, 0xffffffff, 0x00070000,
504 0x9228, 0xffffffff, 0x00030002,
505 0x922c, 0xffffffff, 0x00050004,
506 0x9238, 0xffffffff, 0x00010006,
507 0x923c, 0xffffffff, 0x00090008,
508 0x9240, 0xffffffff, 0x00070000,
509 0x9244, 0xffffffff, 0x00030002,
510 0x9248, 0xffffffff, 0x00050004,
511 0x9254, 0xffffffff, 0x00010006,
512 0x9258, 0xffffffff, 0x00090008,
513 0x925c, 0xffffffff, 0x00070000,
514 0x9260, 0xffffffff, 0x00030002,
515 0x9264, 0xffffffff, 0x00050004,
516 0x9270, 0xffffffff, 0x00010006,
517 0x9274, 0xffffffff, 0x00090008,
518 0x9278, 0xffffffff, 0x00070000,
519 0x927c, 0xffffffff, 0x00030002,
520 0x9280, 0xffffffff, 0x00050004,
521 0x928c, 0xffffffff, 0x00010006,
522 0x9290, 0xffffffff, 0x00090008,
523 0x9294, 0xffffffff, 0x00000000,
524 0x929c, 0xffffffff, 0x00000001,
525 0x802c, 0xffffffff, 0xc0000000,
526 0x977c, 0xffffffff, 0x00000100,
527 0x3f80, 0xffffffff, 0x00000100,
528 0xa210, 0xffffffff, 0x00000100,
529 0xa214, 0xffffffff, 0x00000100,
530 0x4d8, 0xffffffff, 0x00000100,
531 0x9784, 0xffffffff, 0x00000100,
532 0x9698, 0xffffffff, 0x00000100,
533 0x4d4, 0xffffffff, 0x00000200,
534 0x30cc, 0xffffffff, 0x00000100,
535 0x802c, 0xffffffff, 0xc0000000
536};
537
538static const u32 supersumo_golden_registers[] =
539{
540 0x5eb4, 0xffffffff, 0x00000002,
541 0x5cc, 0xffffffff, 0x00000001,
542 0x7030, 0xffffffff, 0x00000011,
543 0x7c30, 0xffffffff, 0x00000011,
544 0x6104, 0x01000300, 0x00000000,
545 0x5bc0, 0x00300000, 0x00000000,
546 0x8c04, 0xffffffff, 0x40600060,
547 0x8c08, 0xffffffff, 0x001c001c,
548 0x8c20, 0xffffffff, 0x00800080,
549 0x8c24, 0xffffffff, 0x00800080,
550 0x8c18, 0xffffffff, 0x20202078,
551 0x8c1c, 0xffffffff, 0x00001010,
552 0x918c, 0xffffffff, 0x00010006,
553 0x91a8, 0xffffffff, 0x00010006,
554 0x91c4, 0xffffffff, 0x00010006,
555 0x91e0, 0xffffffff, 0x00010006,
556 0x9200, 0xffffffff, 0x00010006,
557 0x9150, 0xffffffff, 0x6e944040,
558 0x917c, 0xffffffff, 0x00030002,
559 0x9180, 0xffffffff, 0x00050004,
560 0x9198, 0xffffffff, 0x00030002,
561 0x919c, 0xffffffff, 0x00050004,
562 0x91b4, 0xffffffff, 0x00030002,
563 0x91b8, 0xffffffff, 0x00050004,
564 0x91d0, 0xffffffff, 0x00030002,
565 0x91d4, 0xffffffff, 0x00050004,
566 0x91f0, 0xffffffff, 0x00030002,
567 0x91f4, 0xffffffff, 0x00050004,
568 0x915c, 0xffffffff, 0x00010000,
569 0x9160, 0xffffffff, 0x00030002,
570 0x3f90, 0xffff0000, 0xff000000,
571 0x9178, 0xffffffff, 0x00070000,
572 0x9194, 0xffffffff, 0x00070000,
573 0x91b0, 0xffffffff, 0x00070000,
574 0x91cc, 0xffffffff, 0x00070000,
575 0x91ec, 0xffffffff, 0x00070000,
576 0x9148, 0xffff0000, 0xff000000,
577 0x9190, 0xffffffff, 0x00090008,
578 0x91ac, 0xffffffff, 0x00090008,
579 0x91c8, 0xffffffff, 0x00090008,
580 0x91e4, 0xffffffff, 0x00090008,
581 0x9204, 0xffffffff, 0x00090008,
582 0x3f94, 0xffff0000, 0xff000000,
583 0x914c, 0xffff0000, 0xff000000,
584 0x929c, 0xffffffff, 0x00000001,
585 0x8a18, 0xffffffff, 0x00000100,
586 0x8b28, 0xffffffff, 0x00000100,
587 0x9144, 0xffffffff, 0x00000100,
588 0x5644, 0xffffffff, 0x00000100,
589 0x9b7c, 0xffffffff, 0x00000000,
590 0x8030, 0xffffffff, 0x0000100a,
591 0x8a14, 0xffffffff, 0x00000007,
592 0x8b24, 0xffffffff, 0x00ff0fff,
593 0x8b10, 0xffffffff, 0x00000000,
594 0x28a4c, 0x06000000, 0x06000000,
595 0x4d8, 0xffffffff, 0x00000100,
596 0x913c, 0xffff000f, 0x0100000a,
597 0x960c, 0xffffffff, 0x54763210,
598 0x88c4, 0xffffffff, 0x000000c2,
599 0x88d4, 0xffffffff, 0x00000010,
600 0x8974, 0xffffffff, 0x00000000,
601 0xc78, 0x00000080, 0x00000080,
602 0x5e78, 0xffffffff, 0x001000f0,
603 0xd02c, 0xffffffff, 0x08421000,
604 0xa008, 0xffffffff, 0x00010000,
605 0x8d00, 0xffffffff, 0x100e4848,
606 0x8d04, 0xffffffff, 0x00164745,
607 0x8c00, 0xffffffff, 0xe4000003,
608 0x8cf0, 0x1fffffff, 0x08e00620,
609 0x28350, 0xffffffff, 0x00000000,
610 0x9508, 0xffffffff, 0x00000002
611};
612
613static const u32 sumo_golden_registers[] =
614{
615 0x900c, 0x00ffffff, 0x0017071f,
616 0x8c18, 0xffffffff, 0x10101060,
617 0x8c1c, 0xffffffff, 0x00001010,
618 0x8c30, 0x0000000f, 0x00000005,
619 0x9688, 0x0000000f, 0x00000007
620};
621
622static const u32 wrestler_golden_registers[] =
623{
624 0x5eb4, 0xffffffff, 0x00000002,
625 0x5cc, 0xffffffff, 0x00000001,
626 0x7030, 0xffffffff, 0x00000011,
627 0x7c30, 0xffffffff, 0x00000011,
628 0x6104, 0x01000300, 0x00000000,
629 0x5bc0, 0x00300000, 0x00000000,
630 0x918c, 0xffffffff, 0x00010006,
631 0x91a8, 0xffffffff, 0x00010006,
632 0x9150, 0xffffffff, 0x6e944040,
633 0x917c, 0xffffffff, 0x00030002,
634 0x9198, 0xffffffff, 0x00030002,
635 0x915c, 0xffffffff, 0x00010000,
636 0x3f90, 0xffff0000, 0xff000000,
637 0x9178, 0xffffffff, 0x00070000,
638 0x9194, 0xffffffff, 0x00070000,
639 0x9148, 0xffff0000, 0xff000000,
640 0x9190, 0xffffffff, 0x00090008,
641 0x91ac, 0xffffffff, 0x00090008,
642 0x3f94, 0xffff0000, 0xff000000,
643 0x914c, 0xffff0000, 0xff000000,
644 0x929c, 0xffffffff, 0x00000001,
645 0x8a18, 0xffffffff, 0x00000100,
646 0x8b28, 0xffffffff, 0x00000100,
647 0x9144, 0xffffffff, 0x00000100,
648 0x9b7c, 0xffffffff, 0x00000000,
649 0x8030, 0xffffffff, 0x0000100a,
650 0x8a14, 0xffffffff, 0x00000001,
651 0x8b24, 0xffffffff, 0x00ff0fff,
652 0x8b10, 0xffffffff, 0x00000000,
653 0x28a4c, 0x06000000, 0x06000000,
654 0x4d8, 0xffffffff, 0x00000100,
655 0x913c, 0xffff000f, 0x0100000a,
656 0x960c, 0xffffffff, 0x54763210,
657 0x88c4, 0xffffffff, 0x000000c2,
658 0x88d4, 0xffffffff, 0x00000010,
659 0x8974, 0xffffffff, 0x00000000,
660 0xc78, 0x00000080, 0x00000080,
661 0x5e78, 0xffffffff, 0x001000f0,
662 0xd02c, 0xffffffff, 0x08421000,
663 0xa008, 0xffffffff, 0x00010000,
664 0x8d00, 0xffffffff, 0x100e4848,
665 0x8d04, 0xffffffff, 0x00164745,
666 0x8c00, 0xffffffff, 0xe4000003,
667 0x8cf0, 0x1fffffff, 0x08e00410,
668 0x28350, 0xffffffff, 0x00000000,
669 0x9508, 0xffffffff, 0x00000002,
670 0x900c, 0xffffffff, 0x0017071f,
671 0x8c18, 0xffffffff, 0x10101060,
672 0x8c1c, 0xffffffff, 0x00001010
673};
674
675static const u32 barts_golden_registers[] =
676{
677 0x5eb4, 0xffffffff, 0x00000002,
678 0x5e78, 0x8f311ff1, 0x001000f0,
679 0x3f90, 0xffff0000, 0xff000000,
680 0x9148, 0xffff0000, 0xff000000,
681 0x3f94, 0xffff0000, 0xff000000,
682 0x914c, 0xffff0000, 0xff000000,
683 0xc78, 0x00000080, 0x00000080,
684 0xbd4, 0x70073777, 0x00010001,
685 0xd02c, 0xbfffff1f, 0x08421000,
686 0xd0b8, 0x03773777, 0x02011003,
687 0x5bc0, 0x00200000, 0x50100000,
688 0x98f8, 0x33773777, 0x02011003,
689 0x98fc, 0xffffffff, 0x76543210,
690 0x7030, 0x31000311, 0x00000011,
691 0x2f48, 0x00000007, 0x02011003,
692 0x6b28, 0x00000010, 0x00000012,
693 0x7728, 0x00000010, 0x00000012,
694 0x10328, 0x00000010, 0x00000012,
695 0x10f28, 0x00000010, 0x00000012,
696 0x11b28, 0x00000010, 0x00000012,
697 0x12728, 0x00000010, 0x00000012,
698 0x240c, 0x000007ff, 0x00000380,
699 0x8a14, 0xf000001f, 0x00000007,
700 0x8b24, 0x3fff3fff, 0x00ff0fff,
701 0x8b10, 0x0000ff0f, 0x00000000,
702 0x28a4c, 0x07ffffff, 0x06000000,
703 0x10c, 0x00000001, 0x00010003,
704 0xa02c, 0xffffffff, 0x0000009b,
705 0x913c, 0x0000000f, 0x0100000a,
706 0x8d00, 0xffff7f7f, 0x100e4848,
707 0x8d04, 0x00ffffff, 0x00164745,
708 0x8c00, 0xfffc0003, 0xe4000003,
709 0x8c04, 0xf8ff00ff, 0x40600060,
710 0x8c08, 0x00ff00ff, 0x001c001c,
711 0x8cf0, 0x1fff1fff, 0x08e00620,
712 0x8c20, 0x0fff0fff, 0x00800080,
713 0x8c24, 0x0fff0fff, 0x00800080,
714 0x8c18, 0xffffffff, 0x20202078,
715 0x8c1c, 0x0000ffff, 0x00001010,
716 0x28350, 0x00000f01, 0x00000000,
717 0x9508, 0x3700001f, 0x00000002,
718 0x960c, 0xffffffff, 0x54763210,
719 0x88c4, 0x001f3ae3, 0x000000c2,
720 0x88d4, 0x0000001f, 0x00000010,
721 0x8974, 0xffffffff, 0x00000000
722};
723
724static const u32 turks_golden_registers[] =
725{
726 0x5eb4, 0xffffffff, 0x00000002,
727 0x5e78, 0x8f311ff1, 0x001000f0,
728 0x8c8, 0x00003000, 0x00001070,
729 0x8cc, 0x000fffff, 0x00040035,
730 0x3f90, 0xffff0000, 0xfff00000,
731 0x9148, 0xffff0000, 0xfff00000,
732 0x3f94, 0xffff0000, 0xfff00000,
733 0x914c, 0xffff0000, 0xfff00000,
734 0xc78, 0x00000080, 0x00000080,
735 0xbd4, 0x00073007, 0x00010002,
736 0xd02c, 0xbfffff1f, 0x08421000,
737 0xd0b8, 0x03773777, 0x02010002,
738 0x5bc0, 0x00200000, 0x50100000,
739 0x98f8, 0x33773777, 0x00010002,
740 0x98fc, 0xffffffff, 0x33221100,
741 0x7030, 0x31000311, 0x00000011,
742 0x2f48, 0x33773777, 0x00010002,
743 0x6b28, 0x00000010, 0x00000012,
744 0x7728, 0x00000010, 0x00000012,
745 0x10328, 0x00000010, 0x00000012,
746 0x10f28, 0x00000010, 0x00000012,
747 0x11b28, 0x00000010, 0x00000012,
748 0x12728, 0x00000010, 0x00000012,
749 0x240c, 0x000007ff, 0x00000380,
750 0x8a14, 0xf000001f, 0x00000007,
751 0x8b24, 0x3fff3fff, 0x00ff0fff,
752 0x8b10, 0x0000ff0f, 0x00000000,
753 0x28a4c, 0x07ffffff, 0x06000000,
754 0x10c, 0x00000001, 0x00010003,
755 0xa02c, 0xffffffff, 0x0000009b,
756 0x913c, 0x0000000f, 0x0100000a,
757 0x8d00, 0xffff7f7f, 0x100e4848,
758 0x8d04, 0x00ffffff, 0x00164745,
759 0x8c00, 0xfffc0003, 0xe4000003,
760 0x8c04, 0xf8ff00ff, 0x40600060,
761 0x8c08, 0x00ff00ff, 0x001c001c,
762 0x8cf0, 0x1fff1fff, 0x08e00410,
763 0x8c20, 0x0fff0fff, 0x00800080,
764 0x8c24, 0x0fff0fff, 0x00800080,
765 0x8c18, 0xffffffff, 0x20202078,
766 0x8c1c, 0x0000ffff, 0x00001010,
767 0x28350, 0x00000f01, 0x00000000,
768 0x9508, 0x3700001f, 0x00000002,
769 0x960c, 0xffffffff, 0x54763210,
770 0x88c4, 0x001f3ae3, 0x000000c2,
771 0x88d4, 0x0000001f, 0x00000010,
772 0x8974, 0xffffffff, 0x00000000
773};
774
775static const u32 caicos_golden_registers[] =
776{
777 0x5eb4, 0xffffffff, 0x00000002,
778 0x5e78, 0x8f311ff1, 0x001000f0,
779 0x8c8, 0x00003420, 0x00001450,
780 0x8cc, 0x000fffff, 0x00040035,
781 0x3f90, 0xffff0000, 0xfffc0000,
782 0x9148, 0xffff0000, 0xfffc0000,
783 0x3f94, 0xffff0000, 0xfffc0000,
784 0x914c, 0xffff0000, 0xfffc0000,
785 0xc78, 0x00000080, 0x00000080,
786 0xbd4, 0x00073007, 0x00010001,
787 0xd02c, 0xbfffff1f, 0x08421000,
788 0xd0b8, 0x03773777, 0x02010001,
789 0x5bc0, 0x00200000, 0x50100000,
790 0x98f8, 0x33773777, 0x02010001,
791 0x98fc, 0xffffffff, 0x33221100,
792 0x7030, 0x31000311, 0x00000011,
793 0x2f48, 0x33773777, 0x02010001,
794 0x6b28, 0x00000010, 0x00000012,
795 0x7728, 0x00000010, 0x00000012,
796 0x10328, 0x00000010, 0x00000012,
797 0x10f28, 0x00000010, 0x00000012,
798 0x11b28, 0x00000010, 0x00000012,
799 0x12728, 0x00000010, 0x00000012,
800 0x240c, 0x000007ff, 0x00000380,
801 0x8a14, 0xf000001f, 0x00000001,
802 0x8b24, 0x3fff3fff, 0x00ff0fff,
803 0x8b10, 0x0000ff0f, 0x00000000,
804 0x28a4c, 0x07ffffff, 0x06000000,
805 0x10c, 0x00000001, 0x00010003,
806 0xa02c, 0xffffffff, 0x0000009b,
807 0x913c, 0x0000000f, 0x0100000a,
808 0x8d00, 0xffff7f7f, 0x100e4848,
809 0x8d04, 0x00ffffff, 0x00164745,
810 0x8c00, 0xfffc0003, 0xe4000003,
811 0x8c04, 0xf8ff00ff, 0x40600060,
812 0x8c08, 0x00ff00ff, 0x001c001c,
813 0x8cf0, 0x1fff1fff, 0x08e00410,
814 0x8c20, 0x0fff0fff, 0x00800080,
815 0x8c24, 0x0fff0fff, 0x00800080,
816 0x8c18, 0xffffffff, 0x20202078,
817 0x8c1c, 0x0000ffff, 0x00001010,
818 0x28350, 0x00000f01, 0x00000000,
819 0x9508, 0x3700001f, 0x00000002,
820 0x960c, 0xffffffff, 0x54763210,
821 0x88c4, 0x001f3ae3, 0x000000c2,
822 0x88d4, 0x0000001f, 0x00000010,
823 0x8974, 0xffffffff, 0x00000000
824};
825
826static void evergreen_init_golden_registers(struct radeon_device *rdev)
827{
828 switch (rdev->family) {
829 case CHIP_CYPRESS:
830 case CHIP_HEMLOCK:
831 radeon_program_register_sequence(rdev,
832 evergreen_golden_registers,
833 (const u32)ARRAY_SIZE(evergreen_golden_registers));
834 radeon_program_register_sequence(rdev,
835 evergreen_golden_registers2,
836 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
837 radeon_program_register_sequence(rdev,
838 cypress_mgcg_init,
839 (const u32)ARRAY_SIZE(cypress_mgcg_init));
840 break;
841 case CHIP_JUNIPER:
842 radeon_program_register_sequence(rdev,
843 evergreen_golden_registers,
844 (const u32)ARRAY_SIZE(evergreen_golden_registers));
845 radeon_program_register_sequence(rdev,
846 evergreen_golden_registers2,
847 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
848 radeon_program_register_sequence(rdev,
849 juniper_mgcg_init,
850 (const u32)ARRAY_SIZE(juniper_mgcg_init));
851 break;
852 case CHIP_REDWOOD:
853 radeon_program_register_sequence(rdev,
854 evergreen_golden_registers,
855 (const u32)ARRAY_SIZE(evergreen_golden_registers));
856 radeon_program_register_sequence(rdev,
857 evergreen_golden_registers2,
858 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
859 radeon_program_register_sequence(rdev,
860 redwood_mgcg_init,
861 (const u32)ARRAY_SIZE(redwood_mgcg_init));
862 break;
863 case CHIP_CEDAR:
864 radeon_program_register_sequence(rdev,
865 cedar_golden_registers,
866 (const u32)ARRAY_SIZE(cedar_golden_registers));
867 radeon_program_register_sequence(rdev,
868 evergreen_golden_registers2,
869 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
870 radeon_program_register_sequence(rdev,
871 cedar_mgcg_init,
872 (const u32)ARRAY_SIZE(cedar_mgcg_init));
873 break;
874 case CHIP_PALM:
875 radeon_program_register_sequence(rdev,
876 wrestler_golden_registers,
877 (const u32)ARRAY_SIZE(wrestler_golden_registers));
878 break;
879 case CHIP_SUMO:
880 radeon_program_register_sequence(rdev,
881 supersumo_golden_registers,
882 (const u32)ARRAY_SIZE(supersumo_golden_registers));
883 break;
884 case CHIP_SUMO2:
885 radeon_program_register_sequence(rdev,
886 supersumo_golden_registers,
887 (const u32)ARRAY_SIZE(supersumo_golden_registers));
888 radeon_program_register_sequence(rdev,
889 sumo_golden_registers,
890 (const u32)ARRAY_SIZE(sumo_golden_registers));
891 break;
892 case CHIP_BARTS:
893 radeon_program_register_sequence(rdev,
894 barts_golden_registers,
895 (const u32)ARRAY_SIZE(barts_golden_registers));
896 break;
897 case CHIP_TURKS:
898 radeon_program_register_sequence(rdev,
899 turks_golden_registers,
900 (const u32)ARRAY_SIZE(turks_golden_registers));
901 break;
902 case CHIP_CAICOS:
903 radeon_program_register_sequence(rdev,
904 caicos_golden_registers,
905 (const u32)ARRAY_SIZE(caicos_golden_registers));
906 break;
907 default:
908 break;
909 }
910}
911
/**
 * evergreen_tiling_fields - decode packed tiling flags into surface fields
 *
 * @tiling_flags: packed RADEON_TILING_EG_* fields
 * @bankw: returns the bank width as an ADDR_SURF_BANK_WIDTH_* encoding
 * @bankh: returns the bank height as an ADDR_SURF_BANK_HEIGHT_* encoding
 * @mtaspect: returns the macro tile aspect as an ADDR_SURF_MACRO_TILE_ASPECT_* encoding
 * @tile_split: returns the raw tile split field (no re-encoding applied)
 *
 * Extracts the raw tiling parameters from @tiling_flags and converts the
 * bank width/height and macro tile aspect from their numeric values
 * (1/2/4/8) into the corresponding register encodings.  Any other raw
 * value falls through to the encoding for 1 (the default: label shares
 * case 1's statement).
 */
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	/* pull the raw numeric fields out of the packed flags */
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	/* map numeric bank width to its register encoding; unknown -> 1 */
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	/* map numeric bank height to its register encoding; unknown -> 1 */
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	/* map numeric macro tile aspect to its register encoding; unknown -> 1 */
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}
942
Alex Deucher23d33ba2013-04-08 12:41:32 +0200943static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
944 u32 cntl_reg, u32 status_reg)
945{
946 int r, i;
947 struct atom_clock_dividers dividers;
948
949 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
950 clock, false, &dividers);
951 if (r)
952 return r;
953
954 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
955
956 for (i = 0; i < 100; i++) {
957 if (RREG32(status_reg) & DCLK_STATUS)
958 break;
959 mdelay(10);
960 }
961 if (i == 100)
962 return -ETIMEDOUT;
963
964 return 0;
965}
966
967int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
968{
969 int r = 0;
970 u32 cg_scratch = RREG32(CG_SCRATCH1);
971
972 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
973 if (r)
974 goto done;
975 cg_scratch &= 0xffff0000;
976 cg_scratch |= vclk / 100; /* Mhz */
977
978 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
979 if (r)
980 goto done;
981 cg_scratch &= 0x0000ffff;
982 cg_scratch |= (dclk / 100) << 16; /* Mhz */
983
984done:
985 WREG32(CG_SCRATCH1, cg_scratch);
986
987 return r;
988}
989
/**
 * evergreen_set_uvd_clocks - set the UVD clocks via the UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 together with @dclk puts the PLL to sleep)
 * @dclk: requested UVD decode clock
 *
 * Reprograms the UPLL for the requested UVD clocks.  The sequence is
 * order-critical: the clocks are first switched to the bypass source,
 * the PLL is reprogrammed and allowed to settle, and only then are the
 * clocks switched back to the PLL outputs.  If either requested clock
 * is zero the PLL is left in bypass and put to sleep.
 *
 * Returns 0 on success, error code otherwise.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* constants are the UPLL constraints passed to the divider helper
	 * (VCO range, fb divider limits, etc.) — see the helper's kernel-doc */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects a VCO range based on the feedback divider
	 * (hardware-specific threshold) */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1078
Alex Deucherd054ac12011-09-01 17:46:15 +00001079void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1080{
1081 u16 ctl, v;
Jiang Liu32195ae2012-07-24 17:20:30 +08001082 int err;
Alex Deucherd054ac12011-09-01 17:46:15 +00001083
Jiang Liu32195ae2012-07-24 17:20:30 +08001084 err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
Alex Deucherd054ac12011-09-01 17:46:15 +00001085 if (err)
1086 return;
1087
1088 v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1089
1090 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1091 * to avoid hangs or perfomance issues
1092 */
1093 if ((v == 0) || (v == 6) || (v == 7)) {
1094 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1095 ctl |= (2 << 12);
Jiang Liu32195ae2012-07-24 17:20:30 +08001096 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
Alex Deucherd054ac12011-09-01 17:46:15 +00001097 }
1098}
1099
Alex Deucher10257a62013-04-09 18:49:59 -04001100static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1101{
1102 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1103 return true;
1104 else
1105 return false;
1106}
1107
1108static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1109{
1110 u32 pos1, pos2;
1111
1112 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1113 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1114
1115 if (pos1 != pos2)
1116 return true;
1117 else
1118 return false;
1119}
1120
Alex Deucher377edc82012-07-17 14:02:42 -04001121/**
1122 * dce4_wait_for_vblank - vblank wait asic callback.
1123 *
1124 * @rdev: radeon_device pointer
1125 * @crtc: crtc to wait for vblank on
1126 *
1127 * Wait for vblank on the requested crtc (evergreen+).
1128 */
Alex Deucher3ae19b72012-02-23 17:53:37 -05001129void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1130{
Alex Deucher10257a62013-04-09 18:49:59 -04001131 unsigned i = 0;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001132
Alex Deucher4a159032012-08-15 17:13:53 -04001133 if (crtc >= rdev->num_crtc)
1134 return;
1135
Alex Deucher10257a62013-04-09 18:49:59 -04001136 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1137 return;
1138
1139 /* depending on when we hit vblank, we may be close to active; if so,
1140 * wait for another frame.
1141 */
1142 while (dce4_is_in_vblank(rdev, crtc)) {
1143 if (i++ % 100 == 0) {
1144 if (!dce4_is_counter_moving(rdev, crtc))
Alex Deucher3ae19b72012-02-23 17:53:37 -05001145 break;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001146 }
Alex Deucher10257a62013-04-09 18:49:59 -04001147 }
1148
1149 while (!dce4_is_in_vblank(rdev, crtc)) {
1150 if (i++ % 100 == 0) {
1151 if (!dce4_is_counter_moving(rdev, crtc))
Alex Deucher3ae19b72012-02-23 17:53:37 -05001152 break;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001153 }
1154 }
1155}
1156
/**
 * evergreen_pre_page_flip - pre-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to prepare for pageflip on
 *
 * Pre-pageflip callback (evergreen+): takes a reference on the
 * pageflip interrupt (the vblank irq) so the flip can be completed
 * from the interrupt handler.
 */
void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	/* enable the pflip int */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}
1171
Alex Deucher377edc82012-07-17 14:02:42 -04001172/**
1173 * evergreen_post_page_flip - pos-pageflip callback.
1174 *
1175 * @rdev: radeon_device pointer
1176 * @crtc: crtc to cleanup pageflip on
1177 *
1178 * Post-pageflip callback (evergreen+).
1179 * Disables the pageflip irq (vblank irq).
1180 */
Alex Deucher6f34be52010-11-21 10:59:01 -05001181void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1182{
1183 /* disable the pflip int */
1184 radeon_irq_kms_pflip_irq_put(rdev, crtc);
1185}
1186
/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Does the actual pageflip (evergreen+).
 * During vblank we take the crtc lock and wait for the update_pending
 * bit to go high, when it does, we release the lock, and allow the
 * double buffered update to take place.
 * Returns the current update pending status.
 */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses (both primary and secondary surface,
	 * high dword first, then the low 32 bits) */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high. */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	/* NOTE(review): this message is printed even if the loop above timed
	 * out without seeing UPDATE_PENDING — the final status is still
	 * reported accurately by the return value below. */
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}
1236
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper: raw TS0 ADC reading corrected by the offset
		 * stored in CG_THERMAL_CTRL.  The offset is a 9-bit
		 * two's complement value (bit 8 = sign). */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* negative offset: subtract its magnitude (0x200 - toffset) */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* decode the reading: bits 10/9 clamp to the extremes,
		 * bit 8 marks a negative 9-bit value */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			/* sign-extend the 9-bit value to a full int */
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* reading is in half-degrees; convert to millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1275
Alex Deucher20d391d2011-02-01 16:12:34 -05001276int sumo_get_temp(struct radeon_device *rdev)
Alex Deuchere33df252010-11-22 17:56:32 -05001277{
1278 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
Alex Deucher20d391d2011-02-01 16:12:34 -05001279 int actual_temp = temp - 49;
Alex Deuchere33df252010-11-22 17:56:32 -05001280
1281 return actual_temp * 1000;
1282}
1283
Alex Deucher377edc82012-07-17 14:02:42 -04001284/**
1285 * sumo_pm_init_profile - Initialize power profiles callback.
1286 *
1287 * @rdev: radeon_device pointer
1288 *
1289 * Initialize the power states used in profile mode
1290 * (sumo, trinity, SI).
1291 * Used for profile mode only.
1292 */
Alex Deuchera4c9e2e2011-11-04 10:09:41 -04001293void sumo_pm_init_profile(struct radeon_device *rdev)
1294{
1295 int idx;
1296
1297 /* default */
1298 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1299 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1300 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1301 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1302
1303 /* low,mid sh/mh */
1304 if (rdev->flags & RADEON_IS_MOBILITY)
1305 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1306 else
1307 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1308
1309 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1310 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1311 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1312 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1313
1314 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1315 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1316 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1317 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1318
1319 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1320 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1321 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1322 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1323
1324 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1325 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1326 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1327 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1328
1329 /* high sh/mh */
1330 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1331 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1332 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1333 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1334 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1335 rdev->pm.power_state[idx].num_clock_modes - 1;
1336
1337 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1338 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1339 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1340 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1341 rdev->pm.power_state[idx].num_clock_modes - 1;
1342}
1343
Alex Deucher377edc82012-07-17 14:02:42 -04001344/**
Alex Deucher27810fb2012-10-01 19:25:11 -04001345 * btc_pm_init_profile - Initialize power profiles callback.
1346 *
1347 * @rdev: radeon_device pointer
1348 *
1349 * Initialize the power states used in profile mode
1350 * (BTC, cayman).
1351 * Used for profile mode only.
1352 */
1353void btc_pm_init_profile(struct radeon_device *rdev)
1354{
1355 int idx;
1356
1357 /* default */
1358 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1359 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1360 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1361 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1362 /* starting with BTC, there is one state that is used for both
1363 * MH and SH. Difference is that we always use the high clock index for
1364 * mclk.
1365 */
1366 if (rdev->flags & RADEON_IS_MOBILITY)
1367 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1368 else
1369 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1370 /* low sh */
1371 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1372 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1373 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1374 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1375 /* mid sh */
1376 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1377 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1378 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1379 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1380 /* high sh */
1381 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1382 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1383 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1384 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1385 /* low mh */
1386 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1387 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1388 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1389 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1390 /* mid mh */
1391 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1392 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1393 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1394 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1395 /* high mh */
1396 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1397 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1398 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1399 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1400}
1401
/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 * Only SW-controlled voltages are handled; VDDC and VDDCI are each
 * programmed only when nonzero and different from the cached current
 * value, to avoid redundant atombios calls.
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff01 is a flag rather then an actual voltage */
		if (voltage->voltage == 0xff01)
			return;
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH. Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		/* for BTC+ MH profiles, take vddci from the high-MH clock mode */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff01 is a flag rather then an actual voltage */
		if (voltage->vddci == 0xff01)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1449
Alex Deucher377edc82012-07-17 14:02:42 -04001450/**
1451 * evergreen_pm_prepare - pre-power state change callback.
1452 *
1453 * @rdev: radeon_device pointer
1454 *
1455 * Prepare for a power state change (evergreen+).
1456 */
Alex Deucher49e02b72010-04-23 17:57:27 -04001457void evergreen_pm_prepare(struct radeon_device *rdev)
1458{
1459 struct drm_device *ddev = rdev->ddev;
1460 struct drm_crtc *crtc;
1461 struct radeon_crtc *radeon_crtc;
1462 u32 tmp;
1463
1464 /* disable any active CRTCs */
1465 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1466 radeon_crtc = to_radeon_crtc(crtc);
1467 if (radeon_crtc->enabled) {
1468 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1469 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1470 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1471 }
1472 }
1473}
1474
Alex Deucher377edc82012-07-17 14:02:42 -04001475/**
1476 * evergreen_pm_finish - post-power state change callback.
1477 *
1478 * @rdev: radeon_device pointer
1479 *
1480 * Clean up after a power state change (evergreen+).
1481 */
Alex Deucher49e02b72010-04-23 17:57:27 -04001482void evergreen_pm_finish(struct radeon_device *rdev)
1483{
1484 struct drm_device *ddev = rdev->ddev;
1485 struct drm_crtc *crtc;
1486 struct radeon_crtc *radeon_crtc;
1487 u32 tmp;
1488
1489 /* enable any active CRTCs */
1490 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1491 radeon_crtc = to_radeon_crtc(crtc);
1492 if (radeon_crtc->enabled) {
1493 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1494 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1495 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1496 }
1497 }
1498}
1499
Alex Deucher377edc82012-07-17 14:02:42 -04001500/**
1501 * evergreen_hpd_sense - hpd sense callback.
1502 *
1503 * @rdev: radeon_device pointer
1504 * @hpd: hpd (hotplug detect) pin
1505 *
1506 * Checks if a digital monitor is connected (evergreen+).
1507 * Returns true if connected, false if not connected.
1508 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001509bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1510{
1511 bool connected = false;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001512
1513 switch (hpd) {
1514 case RADEON_HPD_1:
1515 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1516 connected = true;
1517 break;
1518 case RADEON_HPD_2:
1519 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1520 connected = true;
1521 break;
1522 case RADEON_HPD_3:
1523 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1524 connected = true;
1525 break;
1526 case RADEON_HPD_4:
1527 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1528 connected = true;
1529 break;
1530 case RADEON_HPD_5:
1531 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1532 connected = true;
1533 break;
1534 case RADEON_HPD_6:
1535 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1536 connected = true;
1537 break;
1538 default:
1539 break;
1540 }
1541
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001542 return connected;
1543}
1544
Alex Deucher377edc82012-07-17 14:02:42 -04001545/**
1546 * evergreen_hpd_set_polarity - hpd set polarity callback.
1547 *
1548 * @rdev: radeon_device pointer
1549 * @hpd: hpd (hotplug detect) pin
1550 *
1551 * Set the polarity of the hpd pin (evergreen+).
1552 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001553void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1554 enum radeon_hpd_id hpd)
1555{
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001556 u32 tmp;
1557 bool connected = evergreen_hpd_sense(rdev, hpd);
1558
1559 switch (hpd) {
1560 case RADEON_HPD_1:
1561 tmp = RREG32(DC_HPD1_INT_CONTROL);
1562 if (connected)
1563 tmp &= ~DC_HPDx_INT_POLARITY;
1564 else
1565 tmp |= DC_HPDx_INT_POLARITY;
1566 WREG32(DC_HPD1_INT_CONTROL, tmp);
1567 break;
1568 case RADEON_HPD_2:
1569 tmp = RREG32(DC_HPD2_INT_CONTROL);
1570 if (connected)
1571 tmp &= ~DC_HPDx_INT_POLARITY;
1572 else
1573 tmp |= DC_HPDx_INT_POLARITY;
1574 WREG32(DC_HPD2_INT_CONTROL, tmp);
1575 break;
1576 case RADEON_HPD_3:
1577 tmp = RREG32(DC_HPD3_INT_CONTROL);
1578 if (connected)
1579 tmp &= ~DC_HPDx_INT_POLARITY;
1580 else
1581 tmp |= DC_HPDx_INT_POLARITY;
1582 WREG32(DC_HPD3_INT_CONTROL, tmp);
1583 break;
1584 case RADEON_HPD_4:
1585 tmp = RREG32(DC_HPD4_INT_CONTROL);
1586 if (connected)
1587 tmp &= ~DC_HPDx_INT_POLARITY;
1588 else
1589 tmp |= DC_HPDx_INT_POLARITY;
1590 WREG32(DC_HPD4_INT_CONTROL, tmp);
1591 break;
1592 case RADEON_HPD_5:
1593 tmp = RREG32(DC_HPD5_INT_CONTROL);
1594 if (connected)
1595 tmp &= ~DC_HPDx_INT_POLARITY;
1596 else
1597 tmp |= DC_HPDx_INT_POLARITY;
1598 WREG32(DC_HPD5_INT_CONTROL, tmp);
1599 break;
1600 case RADEON_HPD_6:
1601 tmp = RREG32(DC_HPD6_INT_CONTROL);
1602 if (connected)
1603 tmp &= ~DC_HPDx_INT_POLARITY;
1604 else
1605 tmp |= DC_HPDx_INT_POLARITY;
1606 WREG32(DC_HPD6_INT_CONTROL, tmp);
1607 break;
1608 default:
1609 break;
1610 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001611}
1612
Alex Deucher377edc82012-07-17 14:02:42 -04001613/**
1614 * evergreen_hpd_init - hpd setup callback.
1615 *
1616 * @rdev: radeon_device pointer
1617 *
1618 * Setup the hpd pins used by the card (evergreen+).
1619 * Enable the pin, set the polarity, and enable the hpd interrupts.
1620 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001621void evergreen_hpd_init(struct radeon_device *rdev)
1622{
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001623 struct drm_device *dev = rdev->ddev;
1624 struct drm_connector *connector;
Christian Koenigfb982572012-05-17 01:33:30 +02001625 unsigned enabled = 0;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001626 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1627 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001628
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001629 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1630 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
Alex Deucher2e97be72013-04-11 12:45:34 -04001631
1632 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1633 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1634 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1635 * aux dp channel on imac and help (but not completely fix)
1636 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1637 * also avoid interrupt storms during dpms.
1638 */
1639 continue;
1640 }
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001641 switch (radeon_connector->hpd.hpd) {
1642 case RADEON_HPD_1:
1643 WREG32(DC_HPD1_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001644 break;
1645 case RADEON_HPD_2:
1646 WREG32(DC_HPD2_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001647 break;
1648 case RADEON_HPD_3:
1649 WREG32(DC_HPD3_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001650 break;
1651 case RADEON_HPD_4:
1652 WREG32(DC_HPD4_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001653 break;
1654 case RADEON_HPD_5:
1655 WREG32(DC_HPD5_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001656 break;
1657 case RADEON_HPD_6:
1658 WREG32(DC_HPD6_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001659 break;
1660 default:
1661 break;
1662 }
Alex Deucher64912e92011-11-03 11:21:39 -04001663 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
Christian Koenigfb982572012-05-17 01:33:30 +02001664 enabled |= 1 << radeon_connector->hpd.hpd;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001665 }
Christian Koenigfb982572012-05-17 01:33:30 +02001666 radeon_irq_kms_enable_hpd(rdev, enabled);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001667}
1668
/**
 * evergreen_hpd_fini - hpd tear down callback.
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the hpd pins used by the card (evergreen+).
 * Disable the hpd interrupts.
 */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disabled = 0;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			/* writing 0 disables the HPD pin function */
			WREG32(DC_HPD1_CONTROL, 0);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			break;
		default:
			break;
		}
		/* accumulate the mask of pins whose interrupts must be disabled.
		 * NOTE(review): connectors with no hpd pin still reach this shift;
		 * verify RADEON_HPD_NONE cannot make `1 << hpd` exceed the type width.
		 */
		disabled |= 1 << radeon_connector->hpd.hpd;
	}
	radeon_irq_kms_disable_hpd(rdev, disabled);
}
1711
Alex Deucherf9d9c362010-10-22 02:51:05 -04001712/* watermark setup */
1713
/**
 * evergreen_line_buffer_adjust - program line buffer split for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: the crtc to program
 * @mode: mode of @radeon_crtc (NULL if the crtc is disabled)
 * @other_mode: mode of the crtc sharing this line buffer (NULL if disabled)
 *
 * Programs DC_LB_MEMORY_SPLIT for the crtc depending on whether the paired
 * crtc is active, and returns the number of line buffer entries allocated
 * to this crtc (0 if it is disabled).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers. The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode)
			tmp = 0; /* 1/2 — paired crtc is also active */
		else
			tmp = 2; /* whole — paired crtc is off */
	} else
		tmp = 0;

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* translate the chosen split into an entry count; DCE5 has a
	 * slightly larger line buffer than DCE4 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1787
Alex Deucherca7db222012-03-20 17:18:30 -04001788u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
Alex Deucherf9d9c362010-10-22 02:51:05 -04001789{
1790 u32 tmp = RREG32(MC_SHARED_CHMAP);
1791
1792 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1793 case 0:
1794 default:
1795 return 1;
1796 case 1:
1797 return 2;
1798 case 2:
1799 return 4;
1800 case 3:
1801 return 8;
1802 }
1803}
1804
/*
 * struct evergreen_wm_params - inputs to the display watermark calculations.
 *
 * Filled in by evergreen_program_watermarks() from the current mode, clocks
 * and line buffer allocation, then consumed by the evergreen_*_bandwidth()
 * and evergreen_latency_watermark() helpers.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1820
1821static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1822{
1823 /* Calculate DRAM Bandwidth and the part allocated to display. */
1824 fixed20_12 dram_efficiency; /* 0.7 */
1825 fixed20_12 yclk, dram_channels, bandwidth;
1826 fixed20_12 a;
1827
1828 a.full = dfixed_const(1000);
1829 yclk.full = dfixed_const(wm->yclk);
1830 yclk.full = dfixed_div(yclk, a);
1831 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1832 a.full = dfixed_const(10);
1833 dram_efficiency.full = dfixed_const(7);
1834 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1835 bandwidth.full = dfixed_mul(dram_channels, yclk);
1836 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1837
1838 return dfixed_trunc(bandwidth);
1839}
1840
1841static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1842{
1843 /* Calculate DRAM Bandwidth and the part allocated to display. */
1844 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1845 fixed20_12 yclk, dram_channels, bandwidth;
1846 fixed20_12 a;
1847
1848 a.full = dfixed_const(1000);
1849 yclk.full = dfixed_const(wm->yclk);
1850 yclk.full = dfixed_div(yclk, a);
1851 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1852 a.full = dfixed_const(10);
1853 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1854 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1855 bandwidth.full = dfixed_mul(dram_channels, yclk);
1856 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1857
1858 return dfixed_trunc(bandwidth);
1859}
1860
1861static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1862{
1863 /* Calculate the display Data return Bandwidth */
1864 fixed20_12 return_efficiency; /* 0.8 */
1865 fixed20_12 sclk, bandwidth;
1866 fixed20_12 a;
1867
1868 a.full = dfixed_const(1000);
1869 sclk.full = dfixed_const(wm->sclk);
1870 sclk.full = dfixed_div(sclk, a);
1871 a.full = dfixed_const(10);
1872 return_efficiency.full = dfixed_const(8);
1873 return_efficiency.full = dfixed_div(return_efficiency, a);
1874 a.full = dfixed_const(32);
1875 bandwidth.full = dfixed_mul(a, sclk);
1876 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1877
1878 return dfixed_trunc(bandwidth);
1879}
1880
1881static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1882{
1883 /* Calculate the DMIF Request Bandwidth */
1884 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1885 fixed20_12 disp_clk, bandwidth;
1886 fixed20_12 a;
1887
1888 a.full = dfixed_const(1000);
1889 disp_clk.full = dfixed_const(wm->disp_clk);
1890 disp_clk.full = dfixed_div(disp_clk, a);
1891 a.full = dfixed_const(10);
1892 disp_clk_request_efficiency.full = dfixed_const(8);
1893 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1894 a.full = dfixed_const(32);
1895 bandwidth.full = dfixed_mul(a, disp_clk);
1896 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1897
1898 return dfixed_trunc(bandwidth);
1899}
1900
1901static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1902{
1903 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1904 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1905 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
1906 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
1907
1908 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1909}
1910
1911static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
1912{
1913 /* Calculate the display mode Average Bandwidth
1914 * DisplayMode should contain the source and destination dimensions,
1915 * timing, etc.
1916 */
1917 fixed20_12 bpp;
1918 fixed20_12 line_time;
1919 fixed20_12 src_width;
1920 fixed20_12 bandwidth;
1921 fixed20_12 a;
1922
1923 a.full = dfixed_const(1000);
1924 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1925 line_time.full = dfixed_div(line_time, a);
1926 bpp.full = dfixed_const(wm->bytes_per_pixel);
1927 src_width.full = dfixed_const(wm->src_width);
1928 bandwidth.full = dfixed_mul(src_width, bpp);
1929 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1930 bandwidth.full = dfixed_div(bandwidth, line_time);
1931
1932 return dfixed_trunc(bandwidth);
1933}
1934
/**
 * evergreen_latency_watermark - worst case latency the pipe must tolerate
 *
 * @wm: watermark parameters for the crtc
 *
 * Computes, in ns, the worst-case time between a display data request and
 * its return, accounting for MC latency, data stolen by the other heads,
 * DC pipe latency, and line buffer fill time.  Returns 0 when no heads
 * are active.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time to return one 512-byte, 8-beat chunk at the available bandwidth */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* requests from the other heads can be queued ahead of ours */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* downscaling / interlacing needs more source lines per output line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* lb fill bandwidth: our fair share of the available bandwidth,
	 * capped by what the display clock can consume */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to refill the worst-case number of source lines */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the lb refills faster than the active period, latency alone
	 * dominates; otherwise add the shortfall */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
1987
1988static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1989{
1990 if (evergreen_average_bandwidth(wm) <=
1991 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
1992 return true;
1993 else
1994 return false;
1995};
1996
1997static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
1998{
1999 if (evergreen_average_bandwidth(wm) <=
2000 (evergreen_available_bandwidth(wm) / wm->num_heads))
2001 return true;
2002 else
2003 return false;
2004};
2005
2006static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2007{
2008 u32 lb_partitions = wm->lb_size / wm->src_width;
2009 u32 line_time = wm->active_time + wm->blank_time;
2010 u32 latency_tolerant_lines;
2011 u32 latency_hiding;
2012 fixed20_12 a;
2013
2014 a.full = dfixed_const(1);
2015 if (wm->vsc.full > a.full)
2016 latency_tolerant_lines = 1;
2017 else {
2018 if (lb_partitions <= (wm->vtaps + 1))
2019 latency_tolerant_lines = 1;
2020 else
2021 latency_tolerant_lines = 2;
2022 }
2023
2024 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2025
2026 if (evergreen_latency_watermark(wm) <= latency_hiding)
2027 return true;
2028 else
2029 return false;
2030}
2031
2032static void evergreen_program_watermarks(struct radeon_device *rdev,
2033 struct radeon_crtc *radeon_crtc,
2034 u32 lb_size, u32 num_heads)
2035{
2036 struct drm_display_mode *mode = &radeon_crtc->base.mode;
2037 struct evergreen_wm_params wm;
2038 u32 pixel_period;
2039 u32 line_time = 0;
2040 u32 latency_watermark_a = 0, latency_watermark_b = 0;
2041 u32 priority_a_mark = 0, priority_b_mark = 0;
2042 u32 priority_a_cnt = PRIORITY_OFF;
2043 u32 priority_b_cnt = PRIORITY_OFF;
2044 u32 pipe_offset = radeon_crtc->crtc_id * 16;
2045 u32 tmp, arb_control3;
2046 fixed20_12 a, b, c;
2047
2048 if (radeon_crtc->base.enabled && num_heads && mode) {
2049 pixel_period = 1000000 / (u32)mode->clock;
2050 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2051 priority_a_cnt = 0;
2052 priority_b_cnt = 0;
2053
2054 wm.yclk = rdev->pm.current_mclk * 10;
2055 wm.sclk = rdev->pm.current_sclk * 10;
2056 wm.disp_clk = mode->clock;
2057 wm.src_width = mode->crtc_hdisplay;
2058 wm.active_time = mode->crtc_hdisplay * pixel_period;
2059 wm.blank_time = line_time - wm.active_time;
2060 wm.interlaced = false;
2061 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2062 wm.interlaced = true;
2063 wm.vsc = radeon_crtc->vsc;
2064 wm.vtaps = 1;
2065 if (radeon_crtc->rmx_type != RMX_OFF)
2066 wm.vtaps = 2;
2067 wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
2068 wm.lb_size = lb_size;
2069 wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
2070 wm.num_heads = num_heads;
2071
2072 /* set for high clocks */
2073 latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
2074 /* set for low clocks */
2075 /* wm.yclk = low clk; wm.sclk = low clk */
2076 latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);
2077
2078 /* possibly force display priority to high */
2079 /* should really do this at mode validation time... */
2080 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
2081 !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
2082 !evergreen_check_latency_hiding(&wm) ||
2083 (rdev->disp_priority == 2)) {
Alex Deucher92bdfd42011-08-04 17:28:40 +00002084 DRM_DEBUG_KMS("force priority to high\n");
Alex Deucherf9d9c362010-10-22 02:51:05 -04002085 priority_a_cnt |= PRIORITY_ALWAYS_ON;
2086 priority_b_cnt |= PRIORITY_ALWAYS_ON;
2087 }
2088
2089 a.full = dfixed_const(1000);
2090 b.full = dfixed_const(mode->clock);
2091 b.full = dfixed_div(b, a);
2092 c.full = dfixed_const(latency_watermark_a);
2093 c.full = dfixed_mul(c, b);
2094 c.full = dfixed_mul(c, radeon_crtc->hsc);
2095 c.full = dfixed_div(c, a);
2096 a.full = dfixed_const(16);
2097 c.full = dfixed_div(c, a);
2098 priority_a_mark = dfixed_trunc(c);
2099 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2100
2101 a.full = dfixed_const(1000);
2102 b.full = dfixed_const(mode->clock);
2103 b.full = dfixed_div(b, a);
2104 c.full = dfixed_const(latency_watermark_b);
2105 c.full = dfixed_mul(c, b);
2106 c.full = dfixed_mul(c, radeon_crtc->hsc);
2107 c.full = dfixed_div(c, a);
2108 a.full = dfixed_const(16);
2109 c.full = dfixed_div(c, a);
2110 priority_b_mark = dfixed_trunc(c);
2111 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2112 }
2113
2114 /* select wm A */
2115 arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2116 tmp = arb_control3;
2117 tmp &= ~LATENCY_WATERMARK_MASK(3);
2118 tmp |= LATENCY_WATERMARK_MASK(1);
2119 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2120 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2121 (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2122 LATENCY_HIGH_WATERMARK(line_time)));
2123 /* select wm B */
2124 tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2125 tmp &= ~LATENCY_WATERMARK_MASK(3);
2126 tmp |= LATENCY_WATERMARK_MASK(2);
2127 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2128 WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2129 (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2130 LATENCY_HIGH_WATERMARK(line_time)));
2131 /* restore original selection */
2132 WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2133
2134 /* write the priority marks */
2135 WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2136 WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2137
2138}
2139
/**
 * evergreen_bandwidth_update - update display watermarks callback.
 *
 * @rdev: radeon_device pointer
 *
 * Update the display watermarks based on the requested mode(s)
 * (evergreen+).
 */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	radeon_update_display_priority(rdev);

	/* count the active heads; the watermark math divides bandwidth
	 * between them */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	/* crtcs share line buffers pairwise, so program each pair together.
	 * NOTE(review): the i+1 access assumes num_crtc is even — confirm
	 * that holds for every supported asic. */
	for (i = 0; i < rdev->num_crtc; i += 2) {
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}
2170
Alex Deucher377edc82012-07-17 14:02:42 -04002171/**
2172 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2173 *
2174 * @rdev: radeon_device pointer
2175 *
2176 * Wait for the MC (memory controller) to be idle.
2177 * (evergreen+).
2178 * Returns 0 if the MC is idle, -1 if not.
2179 */
Alex Deucherb9952a82011-03-02 20:07:33 -05002180int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002181{
2182 unsigned i;
2183 u32 tmp;
2184
2185 for (i = 0; i < rdev->usec_timeout; i++) {
2186 /* read MC_STATUS */
2187 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2188 if (!tmp)
2189 return 0;
2190 udelay(1);
2191 }
2192 return -1;
2193}
2194
2195/*
2196 * GART
2197 */
Alex Deucher0fcdb612010-03-24 13:20:41 -04002198void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2199{
2200 unsigned i;
2201 u32 tmp;
2202
Alex Deucher6f2f48a2010-12-15 11:01:56 -05002203 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2204
Alex Deucher0fcdb612010-03-24 13:20:41 -04002205 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2206 for (i = 0; i < rdev->usec_timeout; i++) {
2207 /* read MC_STATUS */
2208 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2209 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2210 if (tmp == 2) {
2211 printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2212 return;
2213 }
2214 if (tmp) {
2215 return;
2216 }
2217 udelay(1);
2218 }
2219}
2220
/**
 * evergreen_pcie_gart_enable - set up and enable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the VM L2 cache and L1 TLB
 * registers, enables VM context 0 over the GTT aperture and flushes the
 * TLB.  Returns 0 on success, negative error code if the page table
 * object is missing or cannot be pinned.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion parts use a different MD TLB register block */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these discrete parts have the fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 covers the GTT range; addresses are in 4k pages */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2279
/**
 * evergreen_pcie_gart_disable - disable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disables both VM contexts, leaves the L2/TLB in a pass-through
 * configuration and unpins the GART page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2304
/**
 * evergreen_pcie_gart_fini - final GART teardown
 *
 * @rdev: radeon_device pointer
 *
 * Disables the GART in hardware, then frees the page table VRAM object
 * and the gart structures — order matters: the hw must stop referencing
 * the table before it is freed.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2311
2312
/**
 * evergreen_agp_enable - configure the VM for AGP operation
 *
 * @rdev: radeon_device pointer
 *
 * Programs the L2 cache and L1 TLBs like the GART path but leaves both
 * VM contexts disabled, so accesses pass through to the AGP aperture
 * instead of being translated by a page table.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page table: both VM contexts stay disabled */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2338
/**
 * evergreen_mc_stop - stop display access to the memory controller
 *
 * @rdev: radeon_device pointer
 * @save: saved display state, consumed later by evergreen_mc_resume()
 *
 * Saves VGA state, blanks/disables the active display controllers and
 * puts the MC into blackout so the VRAM layout can be reprogrammed
 * safely.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via BLANK_DATA_EN.
				 * NOTE(review): unlike the else branch, UPDATE_LOCK
				 * is taken here but never written back to 0 — verify
				 * that is intentional. */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* DCE4/5: stop the crtc's memory read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): forcing crtc_enabled false here means the
			 * "lock double buffered regs" loop below — and the per-crtc
			 * unlock/unblank paths in evergreen_mc_resume() — never run.
			 * Confirm this is the intended side effect of the EFI hack. */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2424
/**
 * evergreen_mc_resume - restore display access after MC reprogramming
 *
 * @rdev: radeon_device pointer
 * @save: display state captured by evergreen_mc_stop()
 *
 * Points the crtc and VGA surfaces at the (possibly relocated) start of
 * VRAM, releases the double-buffer locks, lifts the MC blackout, re-enables
 * CPU framebuffer access and unblanks the crtcs that were active.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force immediate (non-vsync) surface updates */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the pending surface update to land */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank by re-enabling blank data */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* DCE4/5: re-enable the crtc's memory read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2512
Alex Deucher755d8192011-03-02 20:07:34 -05002513void evergreen_mc_program(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002514{
2515 struct evergreen_mc_save save;
2516 u32 tmp;
2517 int i, j;
2518
2519 /* Initialize HDP */
2520 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2521 WREG32((0x2c14 + j), 0x00000000);
2522 WREG32((0x2c18 + j), 0x00000000);
2523 WREG32((0x2c1c + j), 0x00000000);
2524 WREG32((0x2c20 + j), 0x00000000);
2525 WREG32((0x2c24 + j), 0x00000000);
2526 }
2527 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2528
2529 evergreen_mc_stop(rdev, &save);
2530 if (evergreen_mc_wait_for_idle(rdev)) {
2531 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2532 }
2533 /* Lockout access through VGA aperture*/
2534 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2535 /* Update configuration */
2536 if (rdev->flags & RADEON_IS_AGP) {
2537 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2538 /* VRAM before AGP */
2539 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2540 rdev->mc.vram_start >> 12);
2541 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2542 rdev->mc.gtt_end >> 12);
2543 } else {
2544 /* VRAM after AGP */
2545 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2546 rdev->mc.gtt_start >> 12);
2547 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2548 rdev->mc.vram_end >> 12);
2549 }
2550 } else {
2551 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2552 rdev->mc.vram_start >> 12);
2553 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2554 rdev->mc.vram_end >> 12);
2555 }
Alex Deucher3b9832f2011-11-10 08:59:39 -05002556 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
Alex Deucher05b3ef62012-03-20 17:18:37 -04002557 /* llano/ontario only */
2558 if ((rdev->family == CHIP_PALM) ||
2559 (rdev->family == CHIP_SUMO) ||
2560 (rdev->family == CHIP_SUMO2)) {
Alex Deucherb4183e32010-12-15 11:04:10 -05002561 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2562 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2563 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2564 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2565 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002566 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2567 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2568 WREG32(MC_VM_FB_LOCATION, tmp);
2569 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
Alex Deucherc46cb4d2011-01-06 19:12:37 -05002570 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
Jerome Glisse46fcd2b2010-06-03 19:34:48 +02002571 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002572 if (rdev->flags & RADEON_IS_AGP) {
2573 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2574 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2575 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
2576 } else {
2577 WREG32(MC_VM_AGP_BASE, 0);
2578 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2579 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2580 }
2581 if (evergreen_mc_wait_for_idle(rdev)) {
2582 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2583 }
2584 evergreen_mc_resume(rdev, &save);
2585 /* we need to own VRAM, so turn off the VGA renderer here
2586 * to stop it overwriting our objects */
2587 rv515_vga_render_disable(rdev);
2588}
2589
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002590/*
2591 * CP.
2592 */
Alex Deucher12920592011-02-02 12:37:40 -05002593void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2594{
Christian König876dc9f2012-05-08 14:24:01 +02002595 struct radeon_ring *ring = &rdev->ring[ib->ring];
Alex Deucher89d35802012-07-17 14:02:31 -04002596 u32 next_rptr;
Christian König7b1f2482011-09-23 15:11:23 +02002597
Alex Deucher12920592011-02-02 12:37:40 -05002598 /* set to DX10/11 mode */
Christian Könige32eb502011-10-23 12:56:27 +02002599 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2600 radeon_ring_write(ring, 1);
Christian König45df6802012-07-06 16:22:55 +02002601
2602 if (ring->rptr_save_reg) {
Alex Deucher89d35802012-07-17 14:02:31 -04002603 next_rptr = ring->wptr + 3 + 4;
Christian König45df6802012-07-06 16:22:55 +02002604 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2605 radeon_ring_write(ring, ((ring->rptr_save_reg -
2606 PACKET3_SET_CONFIG_REG_START) >> 2));
2607 radeon_ring_write(ring, next_rptr);
Alex Deucher89d35802012-07-17 14:02:31 -04002608 } else if (rdev->wb.enabled) {
2609 next_rptr = ring->wptr + 5 + 4;
2610 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2611 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2612 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2613 radeon_ring_write(ring, next_rptr);
2614 radeon_ring_write(ring, 0);
Christian König45df6802012-07-06 16:22:55 +02002615 }
2616
Christian Könige32eb502011-10-23 12:56:27 +02002617 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2618 radeon_ring_write(ring,
Alex Deucher0f234f5f2011-02-13 19:06:33 -05002619#ifdef __BIG_ENDIAN
2620 (2 << 0) |
2621#endif
2622 (ib->gpu_addr & 0xFFFFFFFC));
Christian Könige32eb502011-10-23 12:56:27 +02002623 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2624 radeon_ring_write(ring, ib->length_dw);
Alex Deucher12920592011-02-02 12:37:40 -05002625}
2626
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002627
/**
 * evergreen_cp_load_microcode - upload PFP and ME firmware to the CP
 * @rdev: radeon device
 *
 * Stops the CP, then writes the prefetch-parser (PFP) and micro-engine
 * (ME) firmware images word-by-word into the CP ucode RAM.  Firmware
 * blobs are big-endian on disk and converted per word.
 *
 * Returns 0 on success, -EINVAL if firmware has not been loaded yet.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	/* both images must have been fetched by the firmware loader */
	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload PFP ucode; address auto-increments on each data write,
	 * and is reset to 0 before and after the transfer */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload ME ucode the same way via the ME RAM window */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* leave all ucode address pointers at 0 */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2659
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002660static int evergreen_cp_start(struct radeon_device *rdev)
2661{
Christian Könige32eb502011-10-23 12:56:27 +02002662 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
Alex Deucher2281a372010-10-21 13:31:38 -04002663 int r, i;
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002664 uint32_t cp_me;
2665
Christian Könige32eb502011-10-23 12:56:27 +02002666 r = radeon_ring_lock(rdev, ring, 7);
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002667 if (r) {
2668 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2669 return r;
2670 }
Christian Könige32eb502011-10-23 12:56:27 +02002671 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2672 radeon_ring_write(ring, 0x1);
2673 radeon_ring_write(ring, 0x0);
2674 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2675 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2676 radeon_ring_write(ring, 0);
2677 radeon_ring_write(ring, 0);
2678 radeon_ring_unlock_commit(rdev, ring);
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002679
2680 cp_me = 0xff;
2681 WREG32(CP_ME_CNTL, cp_me);
2682
Christian Könige32eb502011-10-23 12:56:27 +02002683 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002684 if (r) {
2685 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2686 return r;
2687 }
Alex Deucher2281a372010-10-21 13:31:38 -04002688
2689 /* setup clear context state */
Christian Könige32eb502011-10-23 12:56:27 +02002690 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2691 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
Alex Deucher2281a372010-10-21 13:31:38 -04002692
2693 for (i = 0; i < evergreen_default_size; i++)
Christian Könige32eb502011-10-23 12:56:27 +02002694 radeon_ring_write(ring, evergreen_default_state[i]);
Alex Deucher2281a372010-10-21 13:31:38 -04002695
Christian Könige32eb502011-10-23 12:56:27 +02002696 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2697 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
Alex Deucher2281a372010-10-21 13:31:38 -04002698
2699 /* set clear context state */
Christian Könige32eb502011-10-23 12:56:27 +02002700 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2701 radeon_ring_write(ring, 0);
Alex Deucher2281a372010-10-21 13:31:38 -04002702
2703 /* SQ_VTX_BASE_VTX_LOC */
Christian Könige32eb502011-10-23 12:56:27 +02002704 radeon_ring_write(ring, 0xc0026f00);
2705 radeon_ring_write(ring, 0x00000000);
2706 radeon_ring_write(ring, 0x00000000);
2707 radeon_ring_write(ring, 0x00000000);
Alex Deucher2281a372010-10-21 13:31:38 -04002708
2709 /* Clear consts */
Christian Könige32eb502011-10-23 12:56:27 +02002710 radeon_ring_write(ring, 0xc0036f00);
2711 radeon_ring_write(ring, 0x00000bc4);
2712 radeon_ring_write(ring, 0xffffffff);
2713 radeon_ring_write(ring, 0xffffffff);
2714 radeon_ring_write(ring, 0xffffffff);
Alex Deucher2281a372010-10-21 13:31:38 -04002715
Christian Könige32eb502011-10-23 12:56:27 +02002716 radeon_ring_write(ring, 0xc0026900);
2717 radeon_ring_write(ring, 0x00000316);
2718 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2719 radeon_ring_write(ring, 0x00000010); /* */
Alex Deucher18ff84d2011-02-02 12:37:41 -05002720
Christian Könige32eb502011-10-23 12:56:27 +02002721 radeon_ring_unlock_commit(rdev, ring);
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002722
2723 return 0;
2724}
2725
Lauri Kasanen1109ca02012-08-31 13:43:50 -04002726static int evergreen_cp_resume(struct radeon_device *rdev)
Alex Deucherfe251e22010-03-24 13:36:43 -04002727{
Christian Könige32eb502011-10-23 12:56:27 +02002728 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
Alex Deucherfe251e22010-03-24 13:36:43 -04002729 u32 tmp;
2730 u32 rb_bufsz;
2731 int r;
2732
2733 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2734 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
2735 SOFT_RESET_PA |
2736 SOFT_RESET_SH |
2737 SOFT_RESET_VGT |
Jerome Glissea49a50d2011-08-24 20:00:17 +00002738 SOFT_RESET_SPI |
Alex Deucherfe251e22010-03-24 13:36:43 -04002739 SOFT_RESET_SX));
2740 RREG32(GRBM_SOFT_RESET);
2741 mdelay(15);
2742 WREG32(GRBM_SOFT_RESET, 0);
2743 RREG32(GRBM_SOFT_RESET);
2744
2745 /* Set ring buffer size */
Christian Könige32eb502011-10-23 12:56:27 +02002746 rb_bufsz = drm_order(ring->ring_size / 8);
Alex Deucher724c80e2010-08-27 18:25:25 -04002747 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
Alex Deucherfe251e22010-03-24 13:36:43 -04002748#ifdef __BIG_ENDIAN
2749 tmp |= BUF_SWAP_32BIT;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002750#endif
Alex Deucherfe251e22010-03-24 13:36:43 -04002751 WREG32(CP_RB_CNTL, tmp);
Christian König15d33322011-09-15 19:02:22 +02002752 WREG32(CP_SEM_WAIT_TIMER, 0x0);
Alex Deucher11ef3f1f2012-01-20 14:47:43 -05002753 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
Alex Deucherfe251e22010-03-24 13:36:43 -04002754
2755 /* Set the write pointer delay */
2756 WREG32(CP_RB_WPTR_DELAY, 0);
2757
2758 /* Initialize the ring buffer's read and write pointers */
2759 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2760 WREG32(CP_RB_RPTR_WR, 0);
Christian Könige32eb502011-10-23 12:56:27 +02002761 ring->wptr = 0;
2762 WREG32(CP_RB_WPTR, ring->wptr);
Alex Deucher724c80e2010-08-27 18:25:25 -04002763
Adam Buchbinder48fc7f72012-09-19 21:48:00 -04002764 /* set the wb address whether it's enabled or not */
Alex Deucher0f234f5f2011-02-13 19:06:33 -05002765 WREG32(CP_RB_RPTR_ADDR,
Alex Deucher0f234f5f2011-02-13 19:06:33 -05002766 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
Alex Deucher724c80e2010-08-27 18:25:25 -04002767 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2768 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2769
2770 if (rdev->wb.enabled)
2771 WREG32(SCRATCH_UMSK, 0xff);
2772 else {
2773 tmp |= RB_NO_UPDATE;
2774 WREG32(SCRATCH_UMSK, 0);
2775 }
2776
Alex Deucherfe251e22010-03-24 13:36:43 -04002777 mdelay(1);
2778 WREG32(CP_RB_CNTL, tmp);
2779
Christian Könige32eb502011-10-23 12:56:27 +02002780 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
Alex Deucherfe251e22010-03-24 13:36:43 -04002781 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2782
Christian Könige32eb502011-10-23 12:56:27 +02002783 ring->rptr = RREG32(CP_RB_RPTR);
Alex Deucherfe251e22010-03-24 13:36:43 -04002784
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002785 evergreen_cp_start(rdev);
Christian Könige32eb502011-10-23 12:56:27 +02002786 ring->ready = true;
Alex Deucherf7128122012-02-23 17:53:45 -05002787 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
Alex Deucherfe251e22010-03-24 13:36:43 -04002788 if (r) {
Christian Könige32eb502011-10-23 12:56:27 +02002789 ring->ready = false;
Alex Deucherfe251e22010-03-24 13:36:43 -04002790 return r;
2791 }
2792 return 0;
2793}
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002794
2795/*
2796 * Core functions
2797 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002798static void evergreen_gpu_init(struct radeon_device *rdev)
2799{
Alex Deucher416a2bd2012-05-31 19:00:25 -04002800 u32 gb_addr_config;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002801 u32 mc_shared_chmap, mc_arb_ramcfg;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002802 u32 sx_debug_1;
2803 u32 smx_dc_ctl0;
2804 u32 sq_config;
2805 u32 sq_lds_resource_mgmt;
2806 u32 sq_gpr_resource_mgmt_1;
2807 u32 sq_gpr_resource_mgmt_2;
2808 u32 sq_gpr_resource_mgmt_3;
2809 u32 sq_thread_resource_mgmt;
2810 u32 sq_thread_resource_mgmt_2;
2811 u32 sq_stack_resource_mgmt_1;
2812 u32 sq_stack_resource_mgmt_2;
2813 u32 sq_stack_resource_mgmt_3;
2814 u32 vgt_cache_invalidation;
Alex Deucherf25a5c62011-05-19 11:07:57 -04002815 u32 hdp_host_path_cntl, tmp;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002816 u32 disabled_rb_mask;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002817 int i, j, num_shader_engines, ps_thread_count;
2818
2819 switch (rdev->family) {
2820 case CHIP_CYPRESS:
2821 case CHIP_HEMLOCK:
2822 rdev->config.evergreen.num_ses = 2;
2823 rdev->config.evergreen.max_pipes = 4;
2824 rdev->config.evergreen.max_tile_pipes = 8;
2825 rdev->config.evergreen.max_simds = 10;
2826 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2827 rdev->config.evergreen.max_gprs = 256;
2828 rdev->config.evergreen.max_threads = 248;
2829 rdev->config.evergreen.max_gs_threads = 32;
2830 rdev->config.evergreen.max_stack_entries = 512;
2831 rdev->config.evergreen.sx_num_of_sets = 4;
2832 rdev->config.evergreen.sx_max_export_size = 256;
2833 rdev->config.evergreen.sx_max_export_pos_size = 64;
2834 rdev->config.evergreen.sx_max_export_smx_size = 192;
2835 rdev->config.evergreen.max_hw_contexts = 8;
2836 rdev->config.evergreen.sq_num_cf_insts = 2;
2837
2838 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2839 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2840 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002841 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002842 break;
2843 case CHIP_JUNIPER:
2844 rdev->config.evergreen.num_ses = 1;
2845 rdev->config.evergreen.max_pipes = 4;
2846 rdev->config.evergreen.max_tile_pipes = 4;
2847 rdev->config.evergreen.max_simds = 10;
2848 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2849 rdev->config.evergreen.max_gprs = 256;
2850 rdev->config.evergreen.max_threads = 248;
2851 rdev->config.evergreen.max_gs_threads = 32;
2852 rdev->config.evergreen.max_stack_entries = 512;
2853 rdev->config.evergreen.sx_num_of_sets = 4;
2854 rdev->config.evergreen.sx_max_export_size = 256;
2855 rdev->config.evergreen.sx_max_export_pos_size = 64;
2856 rdev->config.evergreen.sx_max_export_smx_size = 192;
2857 rdev->config.evergreen.max_hw_contexts = 8;
2858 rdev->config.evergreen.sq_num_cf_insts = 2;
2859
2860 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2861 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2862 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002863 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002864 break;
2865 case CHIP_REDWOOD:
2866 rdev->config.evergreen.num_ses = 1;
2867 rdev->config.evergreen.max_pipes = 4;
2868 rdev->config.evergreen.max_tile_pipes = 4;
2869 rdev->config.evergreen.max_simds = 5;
2870 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2871 rdev->config.evergreen.max_gprs = 256;
2872 rdev->config.evergreen.max_threads = 248;
2873 rdev->config.evergreen.max_gs_threads = 32;
2874 rdev->config.evergreen.max_stack_entries = 256;
2875 rdev->config.evergreen.sx_num_of_sets = 4;
2876 rdev->config.evergreen.sx_max_export_size = 256;
2877 rdev->config.evergreen.sx_max_export_pos_size = 64;
2878 rdev->config.evergreen.sx_max_export_smx_size = 192;
2879 rdev->config.evergreen.max_hw_contexts = 8;
2880 rdev->config.evergreen.sq_num_cf_insts = 2;
2881
2882 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2883 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2884 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002885 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002886 break;
2887 case CHIP_CEDAR:
2888 default:
2889 rdev->config.evergreen.num_ses = 1;
2890 rdev->config.evergreen.max_pipes = 2;
2891 rdev->config.evergreen.max_tile_pipes = 2;
2892 rdev->config.evergreen.max_simds = 2;
2893 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2894 rdev->config.evergreen.max_gprs = 256;
2895 rdev->config.evergreen.max_threads = 192;
2896 rdev->config.evergreen.max_gs_threads = 16;
2897 rdev->config.evergreen.max_stack_entries = 256;
2898 rdev->config.evergreen.sx_num_of_sets = 4;
2899 rdev->config.evergreen.sx_max_export_size = 128;
2900 rdev->config.evergreen.sx_max_export_pos_size = 32;
2901 rdev->config.evergreen.sx_max_export_smx_size = 96;
2902 rdev->config.evergreen.max_hw_contexts = 4;
2903 rdev->config.evergreen.sq_num_cf_insts = 1;
2904
2905 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2906 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2907 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002908 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002909 break;
Alex Deucherd5e455e2010-11-22 17:56:29 -05002910 case CHIP_PALM:
2911 rdev->config.evergreen.num_ses = 1;
2912 rdev->config.evergreen.max_pipes = 2;
2913 rdev->config.evergreen.max_tile_pipes = 2;
2914 rdev->config.evergreen.max_simds = 2;
2915 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2916 rdev->config.evergreen.max_gprs = 256;
2917 rdev->config.evergreen.max_threads = 192;
2918 rdev->config.evergreen.max_gs_threads = 16;
2919 rdev->config.evergreen.max_stack_entries = 256;
2920 rdev->config.evergreen.sx_num_of_sets = 4;
2921 rdev->config.evergreen.sx_max_export_size = 128;
2922 rdev->config.evergreen.sx_max_export_pos_size = 32;
2923 rdev->config.evergreen.sx_max_export_smx_size = 96;
2924 rdev->config.evergreen.max_hw_contexts = 4;
2925 rdev->config.evergreen.sq_num_cf_insts = 1;
2926
2927 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2928 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2929 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002930 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5e455e2010-11-22 17:56:29 -05002931 break;
Alex Deucherd5c5a722011-05-31 15:42:48 -04002932 case CHIP_SUMO:
2933 rdev->config.evergreen.num_ses = 1;
2934 rdev->config.evergreen.max_pipes = 4;
Jerome Glissebd25f072012-12-11 11:56:52 -05002935 rdev->config.evergreen.max_tile_pipes = 4;
Alex Deucherd5c5a722011-05-31 15:42:48 -04002936 if (rdev->pdev->device == 0x9648)
2937 rdev->config.evergreen.max_simds = 3;
2938 else if ((rdev->pdev->device == 0x9647) ||
2939 (rdev->pdev->device == 0x964a))
2940 rdev->config.evergreen.max_simds = 4;
2941 else
2942 rdev->config.evergreen.max_simds = 5;
2943 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2944 rdev->config.evergreen.max_gprs = 256;
2945 rdev->config.evergreen.max_threads = 248;
2946 rdev->config.evergreen.max_gs_threads = 32;
2947 rdev->config.evergreen.max_stack_entries = 256;
2948 rdev->config.evergreen.sx_num_of_sets = 4;
2949 rdev->config.evergreen.sx_max_export_size = 256;
2950 rdev->config.evergreen.sx_max_export_pos_size = 64;
2951 rdev->config.evergreen.sx_max_export_smx_size = 192;
2952 rdev->config.evergreen.max_hw_contexts = 8;
2953 rdev->config.evergreen.sq_num_cf_insts = 2;
2954
2955 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2956 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2957 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Jerome Glissebd25f072012-12-11 11:56:52 -05002958 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5c5a722011-05-31 15:42:48 -04002959 break;
2960 case CHIP_SUMO2:
2961 rdev->config.evergreen.num_ses = 1;
2962 rdev->config.evergreen.max_pipes = 4;
2963 rdev->config.evergreen.max_tile_pipes = 4;
2964 rdev->config.evergreen.max_simds = 2;
2965 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2966 rdev->config.evergreen.max_gprs = 256;
2967 rdev->config.evergreen.max_threads = 248;
2968 rdev->config.evergreen.max_gs_threads = 32;
2969 rdev->config.evergreen.max_stack_entries = 512;
2970 rdev->config.evergreen.sx_num_of_sets = 4;
2971 rdev->config.evergreen.sx_max_export_size = 256;
2972 rdev->config.evergreen.sx_max_export_pos_size = 64;
2973 rdev->config.evergreen.sx_max_export_smx_size = 192;
2974 rdev->config.evergreen.max_hw_contexts = 8;
2975 rdev->config.evergreen.sq_num_cf_insts = 2;
2976
2977 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2978 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2979 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Jerome Glissebd25f072012-12-11 11:56:52 -05002980 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5c5a722011-05-31 15:42:48 -04002981 break;
Alex Deucheradb68fa2011-01-06 21:19:24 -05002982 case CHIP_BARTS:
2983 rdev->config.evergreen.num_ses = 2;
2984 rdev->config.evergreen.max_pipes = 4;
2985 rdev->config.evergreen.max_tile_pipes = 8;
2986 rdev->config.evergreen.max_simds = 7;
2987 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2988 rdev->config.evergreen.max_gprs = 256;
2989 rdev->config.evergreen.max_threads = 248;
2990 rdev->config.evergreen.max_gs_threads = 32;
2991 rdev->config.evergreen.max_stack_entries = 512;
2992 rdev->config.evergreen.sx_num_of_sets = 4;
2993 rdev->config.evergreen.sx_max_export_size = 256;
2994 rdev->config.evergreen.sx_max_export_pos_size = 64;
2995 rdev->config.evergreen.sx_max_export_smx_size = 192;
2996 rdev->config.evergreen.max_hw_contexts = 8;
2997 rdev->config.evergreen.sq_num_cf_insts = 2;
2998
2999 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3000 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3001 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003002 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003003 break;
3004 case CHIP_TURKS:
3005 rdev->config.evergreen.num_ses = 1;
3006 rdev->config.evergreen.max_pipes = 4;
3007 rdev->config.evergreen.max_tile_pipes = 4;
3008 rdev->config.evergreen.max_simds = 6;
3009 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3010 rdev->config.evergreen.max_gprs = 256;
3011 rdev->config.evergreen.max_threads = 248;
3012 rdev->config.evergreen.max_gs_threads = 32;
3013 rdev->config.evergreen.max_stack_entries = 256;
3014 rdev->config.evergreen.sx_num_of_sets = 4;
3015 rdev->config.evergreen.sx_max_export_size = 256;
3016 rdev->config.evergreen.sx_max_export_pos_size = 64;
3017 rdev->config.evergreen.sx_max_export_smx_size = 192;
3018 rdev->config.evergreen.max_hw_contexts = 8;
3019 rdev->config.evergreen.sq_num_cf_insts = 2;
3020
3021 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3022 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3023 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003024 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003025 break;
3026 case CHIP_CAICOS:
3027 rdev->config.evergreen.num_ses = 1;
Jerome Glissebd25f072012-12-11 11:56:52 -05003028 rdev->config.evergreen.max_pipes = 2;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003029 rdev->config.evergreen.max_tile_pipes = 2;
3030 rdev->config.evergreen.max_simds = 2;
3031 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3032 rdev->config.evergreen.max_gprs = 256;
3033 rdev->config.evergreen.max_threads = 192;
3034 rdev->config.evergreen.max_gs_threads = 16;
3035 rdev->config.evergreen.max_stack_entries = 256;
3036 rdev->config.evergreen.sx_num_of_sets = 4;
3037 rdev->config.evergreen.sx_max_export_size = 128;
3038 rdev->config.evergreen.sx_max_export_pos_size = 32;
3039 rdev->config.evergreen.sx_max_export_smx_size = 96;
3040 rdev->config.evergreen.max_hw_contexts = 4;
3041 rdev->config.evergreen.sq_num_cf_insts = 1;
3042
3043 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3044 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3045 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003046 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003047 break;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003048 }
3049
3050 /* Initialize HDP */
3051 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3052 WREG32((0x2c14 + j), 0x00000000);
3053 WREG32((0x2c18 + j), 0x00000000);
3054 WREG32((0x2c1c + j), 0x00000000);
3055 WREG32((0x2c20 + j), 0x00000000);
3056 WREG32((0x2c24 + j), 0x00000000);
3057 }
3058
3059 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3060
Alex Deucherd054ac12011-09-01 17:46:15 +00003061 evergreen_fix_pci_max_read_req_size(rdev);
3062
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003063 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
Alex Deucher05b3ef62012-03-20 17:18:37 -04003064 if ((rdev->family == CHIP_PALM) ||
3065 (rdev->family == CHIP_SUMO) ||
3066 (rdev->family == CHIP_SUMO2))
Alex Deucherd9282fc2011-05-11 03:15:24 -04003067 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3068 else
3069 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003070
Alex Deucher1aa52bd2010-11-17 12:11:03 -05003071 /* setup tiling info dword. gb_addr_config is not adequate since it does
3072 * not have bank info, so create a custom tiling dword.
3073 * bits 3:0 num_pipes
3074 * bits 7:4 num_banks
3075 * bits 11:8 group_size
3076 * bits 15:12 row_size
3077 */
3078 rdev->config.evergreen.tile_config = 0;
3079 switch (rdev->config.evergreen.max_tile_pipes) {
3080 case 1:
3081 default:
3082 rdev->config.evergreen.tile_config |= (0 << 0);
3083 break;
3084 case 2:
3085 rdev->config.evergreen.tile_config |= (1 << 0);
3086 break;
3087 case 4:
3088 rdev->config.evergreen.tile_config |= (2 << 0);
3089 break;
3090 case 8:
3091 rdev->config.evergreen.tile_config |= (3 << 0);
3092 break;
3093 }
Alex Deucherd698a342011-06-23 00:49:29 -04003094 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
Alex Deucher5bfa4872011-05-20 12:35:22 -04003095 if (rdev->flags & RADEON_IS_IGP)
Alex Deucherd698a342011-06-23 00:49:29 -04003096 rdev->config.evergreen.tile_config |= 1 << 4;
Alex Deucher29d65402012-05-31 18:53:36 -04003097 else {
Alex Deucherc8d15ed2012-07-31 11:01:10 -04003098 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3099 case 0: /* four banks */
Alex Deucher29d65402012-05-31 18:53:36 -04003100 rdev->config.evergreen.tile_config |= 0 << 4;
Alex Deucherc8d15ed2012-07-31 11:01:10 -04003101 break;
3102 case 1: /* eight banks */
3103 rdev->config.evergreen.tile_config |= 1 << 4;
3104 break;
3105 case 2: /* sixteen banks */
3106 default:
3107 rdev->config.evergreen.tile_config |= 2 << 4;
3108 break;
3109 }
Alex Deucher29d65402012-05-31 18:53:36 -04003110 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003111 rdev->config.evergreen.tile_config |= 0 << 8;
Alex Deucher1aa52bd2010-11-17 12:11:03 -05003112 rdev->config.evergreen.tile_config |=
3113 ((gb_addr_config & 0x30000000) >> 28) << 12;
3114
Alex Deucher416a2bd2012-05-31 19:00:25 -04003115 num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3116
3117 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3118 u32 efuse_straps_4;
3119 u32 efuse_straps_3;
3120
Alex Deucherff82bbc2013-04-12 11:27:20 -04003121 efuse_straps_4 = RREG32_RCU(0x204);
3122 efuse_straps_3 = RREG32_RCU(0x203);
Alex Deucher416a2bd2012-05-31 19:00:25 -04003123 tmp = (((efuse_straps_4 & 0xf) << 4) |
3124 ((efuse_straps_3 & 0xf0000000) >> 28));
3125 } else {
3126 tmp = 0;
3127 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3128 u32 rb_disable_bitmap;
3129
3130 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3131 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3132 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3133 tmp <<= 4;
3134 tmp |= rb_disable_bitmap;
3135 }
3136 }
3137 /* enabled rb are just the one not disabled :) */
3138 disabled_rb_mask = tmp;
Alex Deuchercedb6552013-04-09 10:13:22 -04003139 tmp = 0;
3140 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3141 tmp |= (1 << i);
3142 /* if all the backends are disabled, fix it up here */
3143 if ((disabled_rb_mask & tmp) == tmp) {
3144 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3145 disabled_rb_mask &= ~(1 << i);
3146 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003147
3148 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3149 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3150
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003151 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3152 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3153 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
Alex Deucher233d1ad2012-12-04 15:25:59 -05003154 WREG32(DMA_TILING_CONFIG, gb_addr_config);
Christian König9a210592013-04-08 12:41:37 +02003155 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3156 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3157 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003158
Alex Deucherf7eb9732013-01-30 13:57:40 -05003159 if ((rdev->config.evergreen.max_backends == 1) &&
3160 (rdev->flags & RADEON_IS_IGP)) {
3161 if ((disabled_rb_mask & 3) == 1) {
3162 /* RB0 disabled, RB1 enabled */
3163 tmp = 0x11111111;
3164 } else {
3165 /* RB1 disabled, RB0 enabled */
3166 tmp = 0x00000000;
3167 }
3168 } else {
3169 tmp = gb_addr_config & NUM_PIPES_MASK;
3170 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3171 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3172 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003173 WREG32(GB_BACKEND_MAP, tmp);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003174
3175 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3176 WREG32(CGTS_TCC_DISABLE, 0);
3177 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3178 WREG32(CGTS_USER_TCC_DISABLE, 0);
3179
3180 /* set HW defaults for 3D engine */
3181 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3182 ROQ_IB2_START(0x2b)));
3183
3184 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3185
3186 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3187 SYNC_GRADIENT |
3188 SYNC_WALKER |
3189 SYNC_ALIGNER));
3190
3191 sx_debug_1 = RREG32(SX_DEBUG_1);
3192 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3193 WREG32(SX_DEBUG_1, sx_debug_1);
3194
3195
3196 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3197 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3198 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3199 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3200
Alex Deucherb866d132012-06-14 22:06:36 +02003201 if (rdev->family <= CHIP_SUMO2)
3202 WREG32(SMX_SAR_CTL0, 0x00010000);
3203
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003204 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3205 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3206 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3207
3208 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3209 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3210 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3211
3212 WREG32(VGT_NUM_INSTANCES, 1);
3213 WREG32(SPI_CONFIG_CNTL, 0);
3214 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3215 WREG32(CP_PERFMON_CNTL, 0);
3216
3217 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3218 FETCH_FIFO_HIWATER(0x4) |
3219 DONE_FIFO_HIWATER(0xe0) |
3220 ALU_UPDATE_FIFO_HIWATER(0x8)));
3221
3222 sq_config = RREG32(SQ_CONFIG);
3223 sq_config &= ~(PS_PRIO(3) |
3224 VS_PRIO(3) |
3225 GS_PRIO(3) |
3226 ES_PRIO(3));
3227 sq_config |= (VC_ENABLE |
3228 EXPORT_SRC_C |
3229 PS_PRIO(0) |
3230 VS_PRIO(1) |
3231 GS_PRIO(2) |
3232 ES_PRIO(3));
3233
Alex Deucherd5e455e2010-11-22 17:56:29 -05003234 switch (rdev->family) {
3235 case CHIP_CEDAR:
3236 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003237 case CHIP_SUMO:
3238 case CHIP_SUMO2:
Alex Deucheradb68fa2011-01-06 21:19:24 -05003239 case CHIP_CAICOS:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003240 /* no vertex cache */
3241 sq_config &= ~VC_ENABLE;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003242 break;
3243 default:
3244 break;
3245 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003246
3247 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3248
3249 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3250 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3251 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3252 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3253 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3254 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3255 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3256
Alex Deucherd5e455e2010-11-22 17:56:29 -05003257 switch (rdev->family) {
3258 case CHIP_CEDAR:
3259 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003260 case CHIP_SUMO:
3261 case CHIP_SUMO2:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003262 ps_thread_count = 96;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003263 break;
3264 default:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003265 ps_thread_count = 128;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003266 break;
3267 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003268
3269 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
Alex Deucherf96b35c2010-06-16 12:24:07 -04003270 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3271 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3272 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3273 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3274 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003275
3276 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3277 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3278 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3279 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3280 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3281 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3282
3283 WREG32(SQ_CONFIG, sq_config);
3284 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3285 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3286 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3287 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3288 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3289 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3290 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3291 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3292 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3293 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3294
3295 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3296 FORCE_EOV_MAX_REZ_CNT(255)));
3297
Alex Deucherd5e455e2010-11-22 17:56:29 -05003298 switch (rdev->family) {
3299 case CHIP_CEDAR:
3300 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003301 case CHIP_SUMO:
3302 case CHIP_SUMO2:
Alex Deucheradb68fa2011-01-06 21:19:24 -05003303 case CHIP_CAICOS:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003304 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
Alex Deucherd5e455e2010-11-22 17:56:29 -05003305 break;
3306 default:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003307 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
Alex Deucherd5e455e2010-11-22 17:56:29 -05003308 break;
3309 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003310 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3311 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3312
3313 WREG32(VGT_GS_VERTEX_REUSE, 16);
Alex Deucher12920592011-02-02 12:37:40 -05003314 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003315 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3316
Alex Deucher60a4a3e2010-06-29 17:03:35 -04003317 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3318 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3319
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003320 WREG32(CB_PERF_CTR0_SEL_0, 0);
3321 WREG32(CB_PERF_CTR0_SEL_1, 0);
3322 WREG32(CB_PERF_CTR1_SEL_0, 0);
3323 WREG32(CB_PERF_CTR1_SEL_1, 0);
3324 WREG32(CB_PERF_CTR2_SEL_0, 0);
3325 WREG32(CB_PERF_CTR2_SEL_1, 0);
3326 WREG32(CB_PERF_CTR3_SEL_0, 0);
3327 WREG32(CB_PERF_CTR3_SEL_1, 0);
3328
Alex Deucher60a4a3e2010-06-29 17:03:35 -04003329 /* clear render buffer base addresses */
3330 WREG32(CB_COLOR0_BASE, 0);
3331 WREG32(CB_COLOR1_BASE, 0);
3332 WREG32(CB_COLOR2_BASE, 0);
3333 WREG32(CB_COLOR3_BASE, 0);
3334 WREG32(CB_COLOR4_BASE, 0);
3335 WREG32(CB_COLOR5_BASE, 0);
3336 WREG32(CB_COLOR6_BASE, 0);
3337 WREG32(CB_COLOR7_BASE, 0);
3338 WREG32(CB_COLOR8_BASE, 0);
3339 WREG32(CB_COLOR9_BASE, 0);
3340 WREG32(CB_COLOR10_BASE, 0);
3341 WREG32(CB_COLOR11_BASE, 0);
3342
3343 /* set the shader const cache sizes to 0 */
3344 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3345 WREG32(i, 0);
3346 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3347 WREG32(i, 0);
3348
Alex Deucherf25a5c62011-05-19 11:07:57 -04003349 tmp = RREG32(HDP_MISC_CNTL);
3350 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3351 WREG32(HDP_MISC_CNTL, tmp);
3352
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003353 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3354 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3355
3356 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3357
3358 udelay(50);
3359
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003360}
3361
3362int evergreen_mc_init(struct radeon_device *rdev)
3363{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003364 u32 tmp;
3365 int chansize, numchan;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003366
3367 /* Get VRAM informations */
3368 rdev->mc.vram_is_ddr = true;
Alex Deucher05b3ef62012-03-20 17:18:37 -04003369 if ((rdev->family == CHIP_PALM) ||
3370 (rdev->family == CHIP_SUMO) ||
3371 (rdev->family == CHIP_SUMO2))
Alex Deucher82084412011-07-01 13:18:28 -04003372 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3373 else
3374 tmp = RREG32(MC_ARB_RAMCFG);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003375 if (tmp & CHANSIZE_OVERRIDE) {
3376 chansize = 16;
3377 } else if (tmp & CHANSIZE_MASK) {
3378 chansize = 64;
3379 } else {
3380 chansize = 32;
3381 }
3382 tmp = RREG32(MC_SHARED_CHMAP);
3383 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3384 case 0:
3385 default:
3386 numchan = 1;
3387 break;
3388 case 1:
3389 numchan = 2;
3390 break;
3391 case 2:
3392 numchan = 4;
3393 break;
3394 case 3:
3395 numchan = 8;
3396 break;
3397 }
3398 rdev->mc.vram_width = numchan * chansize;
3399 /* Could aper size report 0 ? */
Jordan Crouse01d73a62010-05-27 13:40:24 -06003400 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3401 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003402 /* Setup GPU memory space */
Alex Deucher05b3ef62012-03-20 17:18:37 -04003403 if ((rdev->family == CHIP_PALM) ||
3404 (rdev->family == CHIP_SUMO) ||
3405 (rdev->family == CHIP_SUMO2)) {
Alex Deucher6eb18f82010-11-22 17:56:27 -05003406 /* size in bytes on fusion */
3407 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3408 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3409 } else {
Alex Deucher05b3ef62012-03-20 17:18:37 -04003410 /* size in MB on evergreen/cayman/tn */
Niels Ole Salscheiderfc986032013-05-18 21:19:23 +02003411 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3412 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
Alex Deucher6eb18f82010-11-22 17:56:27 -05003413 }
Jerome Glisse51e5fcd2010-02-19 14:33:54 +00003414 rdev->mc.visible_vram_size = rdev->mc.aper_size;
Alex Deucher0ef0c1f2010-11-22 17:56:26 -05003415 r700_vram_gtt_location(rdev, &rdev->mc);
Alex Deucherf47299c2010-03-16 20:54:38 -04003416 radeon_update_bandwidth_info(rdev);
3417
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003418 return 0;
3419}
Jerome Glissed594e462010-02-17 21:54:29 +00003420
Alex Deucher187e3592013-01-18 14:51:38 -05003421void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
Alex Deucher747943e2010-03-24 13:26:36 -04003422{
Jerome Glisse64c56e82013-01-02 17:30:35 -05003423 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
Alex Deucher747943e2010-03-24 13:26:36 -04003424 RREG32(GRBM_STATUS));
Jerome Glisse64c56e82013-01-02 17:30:35 -05003425 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
Alex Deucher747943e2010-03-24 13:26:36 -04003426 RREG32(GRBM_STATUS_SE0));
Jerome Glisse64c56e82013-01-02 17:30:35 -05003427 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
Alex Deucher747943e2010-03-24 13:26:36 -04003428 RREG32(GRBM_STATUS_SE1));
Jerome Glisse64c56e82013-01-02 17:30:35 -05003429 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
Alex Deucher747943e2010-03-24 13:26:36 -04003430 RREG32(SRBM_STATUS));
Alex Deuchera65a4362013-01-18 18:55:54 -05003431 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3432 RREG32(SRBM_STATUS2));
Jerome Glisse440a7cd2012-06-27 12:25:01 -04003433 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3434 RREG32(CP_STALLED_STAT1));
3435 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3436 RREG32(CP_STALLED_STAT2));
3437 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3438 RREG32(CP_BUSY_STAT));
3439 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3440 RREG32(CP_STAT));
Alex Deucher0ecebb92013-01-03 12:40:13 -05003441 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3442 RREG32(DMA_STATUS_REG));
Alex Deucher168757e2013-01-18 19:17:22 -05003443 if (rdev->family >= CHIP_CAYMAN) {
3444 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3445 RREG32(DMA_STATUS_REG + 0x800));
3446 }
Alex Deucher0ecebb92013-01-03 12:40:13 -05003447}
3448
Alex Deucher168757e2013-01-18 19:17:22 -05003449bool evergreen_is_display_hung(struct radeon_device *rdev)
Alex Deuchera65a4362013-01-18 18:55:54 -05003450{
3451 u32 crtc_hung = 0;
3452 u32 crtc_status[6];
3453 u32 i, j, tmp;
3454
3455 for (i = 0; i < rdev->num_crtc; i++) {
3456 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3457 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3458 crtc_hung |= (1 << i);
3459 }
3460 }
3461
3462 for (j = 0; j < 10; j++) {
3463 for (i = 0; i < rdev->num_crtc; i++) {
3464 if (crtc_hung & (1 << i)) {
3465 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3466 if (tmp != crtc_status[i])
3467 crtc_hung &= ~(1 << i);
3468 }
3469 }
3470 if (crtc_hung == 0)
3471 return false;
3472 udelay(100);
3473 }
3474
3475 return true;
3476}
3477
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks need a reset
 *
 * @rdev: radeon_device pointer
 *
 * Reads the various busy/pending status registers and translates them
 * into a mask of RADEON_RESET_* flags describing which blocks appear
 * hung.  An MC (memory controller) hit is deliberately dropped from the
 * mask before returning, since a busy MC is most likely not hung.
 * Returns 0 if everything appears idle.
 */
static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS: graphics pipeline blocks */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG: async DMA engine */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS: RLC, IH, semaphores, GRBM, VM, MC */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's most likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3546
/**
 * evergreen_gpu_soft_reset - soft reset the GPU blocks named in reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting which blocks to reset
 *
 * Halts the CP and DMA engines, stops the memory controller, then pulses
 * the GRBM/SRBM soft-reset bits corresponding to the requested blocks and
 * restarts the MC.  The exact ordering of register writes and delays below
 * follows the hardware reset sequence and must not be rearranged.
 * Does nothing if @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	/* log pre-reset state for debugging */
	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop the MC before touching the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* build the GRBM reset mask (graphics pipeline blocks) */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* build the SRBM reset mask (system blocks) */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset only on discrete parts, never on IGPs */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* assert, hold, then deassert the reset bits; the reads after each
	 * write post the write to the hardware
	 */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	/* log post-reset state for debugging */
	evergreen_print_gpu_status_regs(rdev);
}
3660
Jerome Glissea2d07b72010-03-09 14:45:11 +00003661int evergreen_asic_reset(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003662{
Alex Deuchera65a4362013-01-18 18:55:54 -05003663 u32 reset_mask;
3664
3665 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3666
3667 if (reset_mask)
3668 r600_set_bios_scratch_engine_hung(rdev, true);
3669
3670 evergreen_gpu_soft_reset(rdev, reset_mask);
3671
3672 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3673
3674 if (!reset_mask)
3675 r600_set_bios_scratch_engine_hung(rdev, false);
3676
3677 return 0;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003678}
3679
Alex Deucher123bc182013-01-24 11:37:19 -05003680/**
3681 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3682 *
3683 * @rdev: radeon_device pointer
3684 * @ring: radeon_ring structure holding ring information
3685 *
3686 * Check if the GFX engine is locked up.
3687 * Returns true if the engine appears to be locked up, false if not.
3688 */
3689bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3690{
3691 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3692
3693 if (!(reset_mask & (RADEON_RESET_GFX |
3694 RADEON_RESET_COMPUTE |
3695 RADEON_RESET_CP))) {
3696 radeon_ring_lockup_update(ring);
3697 return false;
3698 }
3699 /* force CP activities */
3700 radeon_ring_force_activity(rdev, ring);
3701 return radeon_ring_test_lockup(rdev, ring);
3702}
3703
3704/**
3705 * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3706 *
3707 * @rdev: radeon_device pointer
3708 * @ring: radeon_ring structure holding ring information
3709 *
3710 * Check if the async DMA engine is locked up.
3711 * Returns true if the engine appears to be locked up, false if not.
3712 */
3713bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3714{
3715 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3716
3717 if (!(reset_mask & RADEON_RESET_DMA)) {
3718 radeon_ring_lockup_update(ring);
3719 return false;
3720 }
3721 /* force ring activities */
3722 radeon_ring_force_activity(rdev, ring);
3723 return radeon_ring_test_lockup(rdev, ring);
3724}
3725
Alex Deucher45f9a392010-03-24 13:55:51 -04003726/* Interrupts */
3727
3728u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
3729{
Alex Deucher46437052012-08-15 17:10:32 -04003730 if (crtc >= rdev->num_crtc)
Alex Deucher45f9a392010-03-24 13:55:51 -04003731 return 0;
Alex Deucher46437052012-08-15 17:10:32 -04003732 else
3733 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
Alex Deucher45f9a392010-03-24 13:55:51 -04003734}
3735
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Masks every interrupt source: CP rings (per-ring on cayman+), the async
 * DMA engine(s), GRBM, per-CRTC vblank and pageflip interrupts, DAC
 * autodetect and the six HPD (hotplug detect) lines.  Only the CRTC pairs
 * that actually exist on this part (num_crtc) are touched.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman+ has three CP rings with per-ring interrupt controls */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		/* second async DMA engine (cayman+ only) */
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	/* mask the first async DMA engine's trap interrupt */
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* vblank interrupts off for every CRTC present on this part */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* pageflip (GRPH) interrupts off for every CRTC present */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE6 */
	if (!ASIC_IS_DCE6(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* mask all six HPD lines, preserving the configured polarity */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
3793
3794int evergreen_irq_set(struct radeon_device *rdev)
3795{
3796 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
Alex Deucher1b370782011-11-17 20:13:28 -05003797 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
Alex Deucher45f9a392010-03-24 13:55:51 -04003798 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
3799 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
Alex Deucher2031f772010-04-22 12:52:11 -04003800 u32 grbm_int_cntl = 0;
Alex Deucher6f34be52010-11-21 10:59:01 -05003801 u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
Alex Deucherf122c612012-03-30 08:59:57 -04003802 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
Alex Deucherf60cbd12012-12-04 15:27:33 -05003803 u32 dma_cntl, dma_cntl1 = 0;
Alex Deucher45f9a392010-03-24 13:55:51 -04003804
3805 if (!rdev->irq.installed) {
Joe Perchesfce7d612010-10-30 21:08:30 +00003806 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
Alex Deucher45f9a392010-03-24 13:55:51 -04003807 return -EINVAL;
3808 }
3809 /* don't enable anything if the ih is disabled */
3810 if (!rdev->ih.enabled) {
3811 r600_disable_interrupts(rdev);
3812 /* force the active interrupt state to all disabled */
3813 evergreen_disable_interrupt_state(rdev);
3814 return 0;
3815 }
3816
3817 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
3818 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
3819 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
3820 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
3821 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
3822 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
3823
Alex Deucherf122c612012-03-30 08:59:57 -04003824 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3825 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3826 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3827 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3828 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3829 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
3830
Alex Deucher233d1ad2012-12-04 15:25:59 -05003831 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
3832
Alex Deucher1b370782011-11-17 20:13:28 -05003833 if (rdev->family >= CHIP_CAYMAN) {
3834 /* enable CP interrupts on all rings */
Christian Koenig736fc372012-05-17 19:52:00 +02003835 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
Alex Deucher1b370782011-11-17 20:13:28 -05003836 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3837 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3838 }
Christian Koenig736fc372012-05-17 19:52:00 +02003839 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
Alex Deucher1b370782011-11-17 20:13:28 -05003840 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
3841 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
3842 }
Christian Koenig736fc372012-05-17 19:52:00 +02003843 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
Alex Deucher1b370782011-11-17 20:13:28 -05003844 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
3845 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
3846 }
3847 } else {
Christian Koenig736fc372012-05-17 19:52:00 +02003848 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
Alex Deucher1b370782011-11-17 20:13:28 -05003849 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
3850 cp_int_cntl |= RB_INT_ENABLE;
3851 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
3852 }
Alex Deucher45f9a392010-03-24 13:55:51 -04003853 }
Alex Deucher1b370782011-11-17 20:13:28 -05003854
Alex Deucher233d1ad2012-12-04 15:25:59 -05003855 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
3856 DRM_DEBUG("r600_irq_set: sw int dma\n");
3857 dma_cntl |= TRAP_ENABLE;
3858 }
3859
Alex Deucherf60cbd12012-12-04 15:27:33 -05003860 if (rdev->family >= CHIP_CAYMAN) {
3861 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
3862 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
3863 DRM_DEBUG("r600_irq_set: sw int dma1\n");
3864 dma_cntl1 |= TRAP_ENABLE;
3865 }
3866 }
3867
Alex Deucher6f34be52010-11-21 10:59:01 -05003868 if (rdev->irq.crtc_vblank_int[0] ||
Christian Koenig736fc372012-05-17 19:52:00 +02003869 atomic_read(&rdev->irq.pflip[0])) {
Alex Deucher45f9a392010-03-24 13:55:51 -04003870 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
3871 crtc1 |= VBLANK_INT_MASK;
3872 }
Alex Deucher6f34be52010-11-21 10:59:01 -05003873 if (rdev->irq.crtc_vblank_int[1] ||
Christian Koenig736fc372012-05-17 19:52:00 +02003874 atomic_read(&rdev->irq.pflip[1])) {
Alex Deucher45f9a392010-03-24 13:55:51 -04003875 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
3876 crtc2 |= VBLANK_INT_MASK;
3877 }
Alex Deucher6f34be52010-11-21 10:59:01 -05003878 if (rdev->irq.crtc_vblank_int[2] ||
Christian Koenig736fc372012-05-17 19:52:00 +02003879 atomic_read(&rdev->irq.pflip[2])) {
Alex Deucher45f9a392010-03-24 13:55:51 -04003880 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
3881 crtc3 |= VBLANK_INT_MASK;
3882 }
Alex Deucher6f34be52010-11-21 10:59:01 -05003883 if (rdev->irq.crtc_vblank_int[3] ||
Christian Koenig736fc372012-05-17 19:52:00 +02003884 atomic_read(&rdev->irq.pflip[3])) {
Alex Deucher45f9a392010-03-24 13:55:51 -04003885 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
3886 crtc4 |= VBLANK_INT_MASK;
3887 }
Alex Deucher6f34be52010-11-21 10:59:01 -05003888 if (rdev->irq.crtc_vblank_int[4] ||
Christian Koenig736fc372012-05-17 19:52:00 +02003889 atomic_read(&rdev->irq.pflip[4])) {
Alex Deucher45f9a392010-03-24 13:55:51 -04003890 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
3891 crtc5 |= VBLANK_INT_MASK;
3892 }
Alex Deucher6f34be52010-11-21 10:59:01 -05003893 if (rdev->irq.crtc_vblank_int[5] ||
Christian Koenig736fc372012-05-17 19:52:00 +02003894 atomic_read(&rdev->irq.pflip[5])) {
Alex Deucher45f9a392010-03-24 13:55:51 -04003895 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
3896 crtc6 |= VBLANK_INT_MASK;
3897 }
3898 if (rdev->irq.hpd[0]) {
3899 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
3900 hpd1 |= DC_HPDx_INT_EN;
3901 }
3902 if (rdev->irq.hpd[1]) {
3903 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
3904 hpd2 |= DC_HPDx_INT_EN;
3905 }
3906 if (rdev->irq.hpd[2]) {
3907 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
3908 hpd3 |= DC_HPDx_INT_EN;
3909 }
3910 if (rdev->irq.hpd[3]) {
3911 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
3912 hpd4 |= DC_HPDx_INT_EN;
3913 }
3914 if (rdev->irq.hpd[4]) {
3915 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
3916 hpd5 |= DC_HPDx_INT_EN;
3917 }
3918 if (rdev->irq.hpd[5]) {
3919 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
3920 hpd6 |= DC_HPDx_INT_EN;
3921 }
Alex Deucherf122c612012-03-30 08:59:57 -04003922 if (rdev->irq.afmt[0]) {
3923 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
3924 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3925 }
3926 if (rdev->irq.afmt[1]) {
3927 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
3928 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3929 }
3930 if (rdev->irq.afmt[2]) {
3931 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
3932 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3933 }
3934 if (rdev->irq.afmt[3]) {
3935 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
3936 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3937 }
3938 if (rdev->irq.afmt[4]) {
3939 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
3940 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3941 }
3942 if (rdev->irq.afmt[5]) {
3943 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
3944 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
3945 }
Alex Deucher45f9a392010-03-24 13:55:51 -04003946
Alex Deucher1b370782011-11-17 20:13:28 -05003947 if (rdev->family >= CHIP_CAYMAN) {
3948 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
3949 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
3950 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
3951 } else
3952 WREG32(CP_INT_CNTL, cp_int_cntl);
Alex Deucher233d1ad2012-12-04 15:25:59 -05003953
3954 WREG32(DMA_CNTL, dma_cntl);
3955
Alex Deucherf60cbd12012-12-04 15:27:33 -05003956 if (rdev->family >= CHIP_CAYMAN)
3957 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
3958
Alex Deucher2031f772010-04-22 12:52:11 -04003959 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
Alex Deucher45f9a392010-03-24 13:55:51 -04003960
3961 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
3962 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
Alex Deucherb7eff392011-07-08 11:44:56 -04003963 if (rdev->num_crtc >= 4) {
Alex Deucher18007402010-11-22 17:56:28 -05003964 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
3965 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
Alex Deucherb7eff392011-07-08 11:44:56 -04003966 }
3967 if (rdev->num_crtc >= 6) {
Alex Deucher18007402010-11-22 17:56:28 -05003968 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
3969 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
3970 }
Alex Deucher45f9a392010-03-24 13:55:51 -04003971
Alex Deucher6f34be52010-11-21 10:59:01 -05003972 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
3973 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
Alex Deucherb7eff392011-07-08 11:44:56 -04003974 if (rdev->num_crtc >= 4) {
3975 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
3976 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
3977 }
3978 if (rdev->num_crtc >= 6) {
3979 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
3980 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
3981 }
Alex Deucher6f34be52010-11-21 10:59:01 -05003982
Alex Deucher45f9a392010-03-24 13:55:51 -04003983 WREG32(DC_HPD1_INT_CONTROL, hpd1);
3984 WREG32(DC_HPD2_INT_CONTROL, hpd2);
3985 WREG32(DC_HPD3_INT_CONTROL, hpd3);
3986 WREG32(DC_HPD4_INT_CONTROL, hpd4);
3987 WREG32(DC_HPD5_INT_CONTROL, hpd5);
3988 WREG32(DC_HPD6_INT_CONTROL, hpd6);
3989
Alex Deucherf122c612012-03-30 08:59:57 -04003990 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
3991 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
3992 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
3993 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
3994 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
3995 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
3996
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003997 return 0;
3998}
3999
Andi Kleencbdd4502011-10-13 16:08:46 -07004000static void evergreen_irq_ack(struct radeon_device *rdev)
Alex Deucher45f9a392010-03-24 13:55:51 -04004001{
4002 u32 tmp;
4003
Alex Deucher6f34be52010-11-21 10:59:01 -05004004 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4005 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4006 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4007 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4008 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4009 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4010 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4011 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
Alex Deucherb7eff392011-07-08 11:44:56 -04004012 if (rdev->num_crtc >= 4) {
4013 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4014 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4015 }
4016 if (rdev->num_crtc >= 6) {
4017 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4018 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4019 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004020
Alex Deucherf122c612012-03-30 08:59:57 -04004021 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4022 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4023 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4024 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4025 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4026 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4027
Alex Deucher6f34be52010-11-21 10:59:01 -05004028 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4029 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4030 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4031 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
Alex Deucher6f34be52010-11-21 10:59:01 -05004032 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004033 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004034 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004035 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004036 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004037 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004038 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004039 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4040
Alex Deucherb7eff392011-07-08 11:44:56 -04004041 if (rdev->num_crtc >= 4) {
4042 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4043 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4044 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4045 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4046 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4047 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4048 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4049 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4050 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4051 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4052 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4053 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4054 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004055
Alex Deucherb7eff392011-07-08 11:44:56 -04004056 if (rdev->num_crtc >= 6) {
4057 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4058 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4059 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4060 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4061 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4062 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4063 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4064 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4065 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4066 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4067 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4068 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4069 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004070
Alex Deucher6f34be52010-11-21 10:59:01 -05004071 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004072 tmp = RREG32(DC_HPD1_INT_CONTROL);
4073 tmp |= DC_HPDx_INT_ACK;
4074 WREG32(DC_HPD1_INT_CONTROL, tmp);
4075 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004076 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004077 tmp = RREG32(DC_HPD2_INT_CONTROL);
4078 tmp |= DC_HPDx_INT_ACK;
4079 WREG32(DC_HPD2_INT_CONTROL, tmp);
4080 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004081 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004082 tmp = RREG32(DC_HPD3_INT_CONTROL);
4083 tmp |= DC_HPDx_INT_ACK;
4084 WREG32(DC_HPD3_INT_CONTROL, tmp);
4085 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004086 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004087 tmp = RREG32(DC_HPD4_INT_CONTROL);
4088 tmp |= DC_HPDx_INT_ACK;
4089 WREG32(DC_HPD4_INT_CONTROL, tmp);
4090 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004091 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004092 tmp = RREG32(DC_HPD5_INT_CONTROL);
4093 tmp |= DC_HPDx_INT_ACK;
4094 WREG32(DC_HPD5_INT_CONTROL, tmp);
4095 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004096 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004097 tmp = RREG32(DC_HPD5_INT_CONTROL);
4098 tmp |= DC_HPDx_INT_ACK;
4099 WREG32(DC_HPD6_INT_CONTROL, tmp);
4100 }
Alex Deucherf122c612012-03-30 08:59:57 -04004101 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4102 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4103 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4104 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4105 }
4106 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4107 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4108 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4109 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4110 }
4111 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4112 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4113 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4114 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4115 }
4116 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4117 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4118 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4119 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4120 }
4121 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4122 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4123 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4124 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4125 }
4126 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4127 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4128 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4129 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4130 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004131}
4132
/* Fully quiesce interrupts: mask delivery, give in-flight interrupts
 * time to land, ack anything that latched, then program the disabled
 * display interrupt state.  Call order is deliberate.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4141
/* Suspend-time teardown: disable and ack all interrupts, then stop
 * the RLC so no further interrupt traffic is generated.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4147
Andi Kleencbdd4502011-10-13 16:08:46 -07004148static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
Alex Deucher45f9a392010-03-24 13:55:51 -04004149{
4150 u32 wptr, tmp;
4151
Alex Deucher724c80e2010-08-27 18:25:25 -04004152 if (rdev->wb.enabled)
Cédric Cano204ae242011-04-19 11:07:13 -04004153 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
Alex Deucher724c80e2010-08-27 18:25:25 -04004154 else
4155 wptr = RREG32(IH_RB_WPTR);
Alex Deucher45f9a392010-03-24 13:55:51 -04004156
4157 if (wptr & RB_OVERFLOW) {
4158 /* When a ring buffer overflow happen start parsing interrupt
4159 * from the last not overwritten vector (wptr + 16). Hopefully
4160 * this should allow us to catchup.
4161 */
4162 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4163 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4164 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4165 tmp = RREG32(IH_RB_CNTL);
4166 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4167 WREG32(IH_RB_CNTL, tmp);
4168 }
4169 return (wptr & rdev->ih.ptr_mask);
4170}
4171
4172int evergreen_irq_process(struct radeon_device *rdev)
4173{
Dave Airlie682f1a52011-06-18 03:59:51 +00004174 u32 wptr;
4175 u32 rptr;
Alex Deucher45f9a392010-03-24 13:55:51 -04004176 u32 src_id, src_data;
4177 u32 ring_index;
Alex Deucher45f9a392010-03-24 13:55:51 -04004178 bool queue_hotplug = false;
Alex Deucherf122c612012-03-30 08:59:57 -04004179 bool queue_hdmi = false;
Alex Deucher45f9a392010-03-24 13:55:51 -04004180
Dave Airlie682f1a52011-06-18 03:59:51 +00004181 if (!rdev->ih.enabled || rdev->shutdown)
Alex Deucher45f9a392010-03-24 13:55:51 -04004182 return IRQ_NONE;
4183
Dave Airlie682f1a52011-06-18 03:59:51 +00004184 wptr = evergreen_get_ih_wptr(rdev);
Christian Koenigc20dc362012-05-16 21:45:24 +02004185
4186restart_ih:
4187 /* is somebody else already processing irqs? */
4188 if (atomic_xchg(&rdev->ih.lock, 1))
4189 return IRQ_NONE;
4190
Dave Airlie682f1a52011-06-18 03:59:51 +00004191 rptr = rdev->ih.rptr;
4192 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
Alex Deucher45f9a392010-03-24 13:55:51 -04004193
Benjamin Herrenschmidt964f6642011-07-13 16:28:19 +10004194 /* Order reading of wptr vs. reading of IH ring data */
4195 rmb();
4196
Alex Deucher45f9a392010-03-24 13:55:51 -04004197 /* display interrupts */
Alex Deucher6f34be52010-11-21 10:59:01 -05004198 evergreen_irq_ack(rdev);
Alex Deucher45f9a392010-03-24 13:55:51 -04004199
Alex Deucher45f9a392010-03-24 13:55:51 -04004200 while (rptr != wptr) {
4201 /* wptr/rptr are in bytes! */
4202 ring_index = rptr / 4;
Alex Deucher0f234f5f2011-02-13 19:06:33 -05004203 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4204 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
Alex Deucher45f9a392010-03-24 13:55:51 -04004205
4206 switch (src_id) {
4207 case 1: /* D1 vblank/vline */
4208 switch (src_data) {
4209 case 0: /* D1 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004210 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
Alex Deucher6f34be52010-11-21 10:59:01 -05004211 if (rdev->irq.crtc_vblank_int[0]) {
4212 drm_handle_vblank(rdev->ddev, 0);
4213 rdev->pm.vblank_sync = true;
4214 wake_up(&rdev->irq.vblank_queue);
4215 }
Christian Koenig736fc372012-05-17 19:52:00 +02004216 if (atomic_read(&rdev->irq.pflip[0]))
Mario Kleiner3e4ea742010-11-21 10:59:02 -05004217 radeon_crtc_handle_flip(rdev, 0);
Alex Deucher6f34be52010-11-21 10:59:01 -05004218 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004219 DRM_DEBUG("IH: D1 vblank\n");
4220 }
4221 break;
4222 case 1: /* D1 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004223 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4224 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004225 DRM_DEBUG("IH: D1 vline\n");
4226 }
4227 break;
4228 default:
4229 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4230 break;
4231 }
4232 break;
4233 case 2: /* D2 vblank/vline */
4234 switch (src_data) {
4235 case 0: /* D2 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004236 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
Alex Deucher6f34be52010-11-21 10:59:01 -05004237 if (rdev->irq.crtc_vblank_int[1]) {
4238 drm_handle_vblank(rdev->ddev, 1);
4239 rdev->pm.vblank_sync = true;
4240 wake_up(&rdev->irq.vblank_queue);
4241 }
Christian Koenig736fc372012-05-17 19:52:00 +02004242 if (atomic_read(&rdev->irq.pflip[1]))
Mario Kleiner3e4ea742010-11-21 10:59:02 -05004243 radeon_crtc_handle_flip(rdev, 1);
Alex Deucher6f34be52010-11-21 10:59:01 -05004244 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004245 DRM_DEBUG("IH: D2 vblank\n");
4246 }
4247 break;
4248 case 1: /* D2 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004249 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4250 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004251 DRM_DEBUG("IH: D2 vline\n");
4252 }
4253 break;
4254 default:
4255 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4256 break;
4257 }
4258 break;
4259 case 3: /* D3 vblank/vline */
4260 switch (src_data) {
4261 case 0: /* D3 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004262 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4263 if (rdev->irq.crtc_vblank_int[2]) {
4264 drm_handle_vblank(rdev->ddev, 2);
4265 rdev->pm.vblank_sync = true;
4266 wake_up(&rdev->irq.vblank_queue);
4267 }
Christian Koenig736fc372012-05-17 19:52:00 +02004268 if (atomic_read(&rdev->irq.pflip[2]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004269 radeon_crtc_handle_flip(rdev, 2);
4270 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004271 DRM_DEBUG("IH: D3 vblank\n");
4272 }
4273 break;
4274 case 1: /* D3 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004275 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4276 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004277 DRM_DEBUG("IH: D3 vline\n");
4278 }
4279 break;
4280 default:
4281 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4282 break;
4283 }
4284 break;
4285 case 4: /* D4 vblank/vline */
4286 switch (src_data) {
4287 case 0: /* D4 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004288 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4289 if (rdev->irq.crtc_vblank_int[3]) {
4290 drm_handle_vblank(rdev->ddev, 3);
4291 rdev->pm.vblank_sync = true;
4292 wake_up(&rdev->irq.vblank_queue);
4293 }
Christian Koenig736fc372012-05-17 19:52:00 +02004294 if (atomic_read(&rdev->irq.pflip[3]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004295 radeon_crtc_handle_flip(rdev, 3);
4296 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004297 DRM_DEBUG("IH: D4 vblank\n");
4298 }
4299 break;
4300 case 1: /* D4 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004301 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4302 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004303 DRM_DEBUG("IH: D4 vline\n");
4304 }
4305 break;
4306 default:
4307 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4308 break;
4309 }
4310 break;
4311 case 5: /* D5 vblank/vline */
4312 switch (src_data) {
4313 case 0: /* D5 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004314 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4315 if (rdev->irq.crtc_vblank_int[4]) {
4316 drm_handle_vblank(rdev->ddev, 4);
4317 rdev->pm.vblank_sync = true;
4318 wake_up(&rdev->irq.vblank_queue);
4319 }
Christian Koenig736fc372012-05-17 19:52:00 +02004320 if (atomic_read(&rdev->irq.pflip[4]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004321 radeon_crtc_handle_flip(rdev, 4);
4322 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004323 DRM_DEBUG("IH: D5 vblank\n");
4324 }
4325 break;
4326 case 1: /* D5 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004327 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4328 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004329 DRM_DEBUG("IH: D5 vline\n");
4330 }
4331 break;
4332 default:
4333 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4334 break;
4335 }
4336 break;
4337 case 6: /* D6 vblank/vline */
4338 switch (src_data) {
4339 case 0: /* D6 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004340 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4341 if (rdev->irq.crtc_vblank_int[5]) {
4342 drm_handle_vblank(rdev->ddev, 5);
4343 rdev->pm.vblank_sync = true;
4344 wake_up(&rdev->irq.vblank_queue);
4345 }
Christian Koenig736fc372012-05-17 19:52:00 +02004346 if (atomic_read(&rdev->irq.pflip[5]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004347 radeon_crtc_handle_flip(rdev, 5);
4348 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004349 DRM_DEBUG("IH: D6 vblank\n");
4350 }
4351 break;
4352 case 1: /* D6 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004353 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4354 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004355 DRM_DEBUG("IH: D6 vline\n");
4356 }
4357 break;
4358 default:
4359 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4360 break;
4361 }
4362 break;
4363 case 42: /* HPD hotplug */
4364 switch (src_data) {
4365 case 0:
Alex Deucher6f34be52010-11-21 10:59:01 -05004366 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4367 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004368 queue_hotplug = true;
4369 DRM_DEBUG("IH: HPD1\n");
4370 }
4371 break;
4372 case 1:
Alex Deucher6f34be52010-11-21 10:59:01 -05004373 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4374 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004375 queue_hotplug = true;
4376 DRM_DEBUG("IH: HPD2\n");
4377 }
4378 break;
4379 case 2:
Alex Deucher6f34be52010-11-21 10:59:01 -05004380 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4381 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004382 queue_hotplug = true;
4383 DRM_DEBUG("IH: HPD3\n");
4384 }
4385 break;
4386 case 3:
Alex Deucher6f34be52010-11-21 10:59:01 -05004387 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4388 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004389 queue_hotplug = true;
4390 DRM_DEBUG("IH: HPD4\n");
4391 }
4392 break;
4393 case 4:
Alex Deucher6f34be52010-11-21 10:59:01 -05004394 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4395 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004396 queue_hotplug = true;
4397 DRM_DEBUG("IH: HPD5\n");
4398 }
4399 break;
4400 case 5:
Alex Deucher6f34be52010-11-21 10:59:01 -05004401 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4402 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004403 queue_hotplug = true;
4404 DRM_DEBUG("IH: HPD6\n");
4405 }
4406 break;
4407 default:
4408 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4409 break;
4410 }
4411 break;
Alex Deucherf122c612012-03-30 08:59:57 -04004412 case 44: /* hdmi */
4413 switch (src_data) {
4414 case 0:
4415 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4416 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4417 queue_hdmi = true;
4418 DRM_DEBUG("IH: HDMI0\n");
4419 }
4420 break;
4421 case 1:
4422 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4423 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4424 queue_hdmi = true;
4425 DRM_DEBUG("IH: HDMI1\n");
4426 }
4427 break;
4428 case 2:
4429 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4430 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4431 queue_hdmi = true;
4432 DRM_DEBUG("IH: HDMI2\n");
4433 }
4434 break;
4435 case 3:
4436 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4437 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4438 queue_hdmi = true;
4439 DRM_DEBUG("IH: HDMI3\n");
4440 }
4441 break;
4442 case 4:
4443 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4444 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4445 queue_hdmi = true;
4446 DRM_DEBUG("IH: HDMI4\n");
4447 }
4448 break;
4449 case 5:
4450 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4451 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4452 queue_hdmi = true;
4453 DRM_DEBUG("IH: HDMI5\n");
4454 }
4455 break;
4456 default:
4457 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4458 break;
4459 }
Christian Königf2ba57b2013-04-08 12:41:29 +02004460 case 124: /* UVD */
4461 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4462 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
Alex Deucherf122c612012-03-30 08:59:57 -04004463 break;
Christian Königae133a12012-09-18 15:30:44 -04004464 case 146:
4465 case 147:
4466 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4467 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
4468 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
4469 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4470 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
4471 /* reset addr and status */
4472 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4473 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04004474 case 176: /* CP_INT in ring buffer */
4475 case 177: /* CP_INT in IB1 */
4476 case 178: /* CP_INT in IB2 */
4477 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
Alex Deucher74652802011-08-25 13:39:48 -04004478 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
Alex Deucher45f9a392010-03-24 13:55:51 -04004479 break;
4480 case 181: /* CP EOP event */
4481 DRM_DEBUG("IH: CP EOP\n");
Alex Deucher1b370782011-11-17 20:13:28 -05004482 if (rdev->family >= CHIP_CAYMAN) {
4483 switch (src_data) {
4484 case 0:
4485 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4486 break;
4487 case 1:
4488 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4489 break;
4490 case 2:
4491 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4492 break;
4493 }
4494 } else
4495 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
Alex Deucher45f9a392010-03-24 13:55:51 -04004496 break;
Alex Deucher233d1ad2012-12-04 15:25:59 -05004497 case 224: /* DMA trap event */
4498 DRM_DEBUG("IH: DMA trap\n");
4499 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4500 break;
Alex Deucher2031f772010-04-22 12:52:11 -04004501 case 233: /* GUI IDLE */
Ilija Hadzic303c8052011-06-07 14:54:48 -04004502 DRM_DEBUG("IH: GUI idle\n");
Alex Deucher2031f772010-04-22 12:52:11 -04004503 break;
Alex Deucherf60cbd12012-12-04 15:27:33 -05004504 case 244: /* DMA trap event */
4505 if (rdev->family >= CHIP_CAYMAN) {
4506 DRM_DEBUG("IH: DMA1 trap\n");
4507 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4508 }
4509 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04004510 default:
4511 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4512 break;
4513 }
4514
4515 /* wptr/rptr are in bytes! */
4516 rptr += 16;
4517 rptr &= rdev->ih.ptr_mask;
4518 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004519 if (queue_hotplug)
Tejun Heo32c87fc2011-01-03 14:49:32 +01004520 schedule_work(&rdev->hotplug_work);
Alex Deucherf122c612012-03-30 08:59:57 -04004521 if (queue_hdmi)
4522 schedule_work(&rdev->audio_work);
Alex Deucher45f9a392010-03-24 13:55:51 -04004523 rdev->ih.rptr = rptr;
4524 WREG32(IH_RB_RPTR, rdev->ih.rptr);
Christian Koenigc20dc362012-05-16 21:45:24 +02004525 atomic_set(&rdev->ih.lock, 0);
4526
4527 /* make sure wptr hasn't changed while processing */
4528 wptr = evergreen_get_ih_wptr(rdev);
4529 if (wptr != rptr)
4530 goto restart_ih;
4531
Alex Deucher45f9a392010-03-24 13:55:51 -04004532 return IRQ_HANDLED;
4533}
4534
Alex Deucher233d1ad2012-12-04 15:25:59 -05004535/**
4536 * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
4537 *
4538 * @rdev: radeon_device pointer
4539 * @fence: radeon fence object
4540 *
4541 * Add a DMA fence packet to the ring to write
4542 * the fence seq number and DMA trap packet to generate
4543 * an interrupt if needed (evergreen-SI).
4544 */
4545void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
4546 struct radeon_fence *fence)
4547{
4548 struct radeon_ring *ring = &rdev->ring[fence->ring];
4549 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
4550 /* write the fence */
Jerome Glisse0fcb6152013-01-14 11:32:27 -05004551 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
Alex Deucher233d1ad2012-12-04 15:25:59 -05004552 radeon_ring_write(ring, addr & 0xfffffffc);
4553 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
4554 radeon_ring_write(ring, fence->seq);
4555 /* generate an interrupt */
Jerome Glisse0fcb6152013-01-14 11:32:27 -05004556 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
Alex Deucher233d1ad2012-12-04 15:25:59 -05004557 /* flush HDP */
Jerome Glisse0fcb6152013-01-14 11:32:27 -05004558 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
Alex Deucher4b681c22013-01-03 19:54:34 -05004559 radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
Alex Deucher233d1ad2012-12-04 15:25:59 -05004560 radeon_ring_write(ring, 1);
4561}
4562
4563/**
4564 * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
4565 *
4566 * @rdev: radeon_device pointer
4567 * @ib: IB object to schedule
4568 *
4569 * Schedule an IB in the DMA ring (evergreen).
4570 */
4571void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
4572 struct radeon_ib *ib)
4573{
4574 struct radeon_ring *ring = &rdev->ring[ib->ring];
4575
4576 if (rdev->wb.enabled) {
4577 u32 next_rptr = ring->wptr + 4;
4578 while ((next_rptr & 7) != 5)
4579 next_rptr++;
4580 next_rptr += 3;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05004581 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
Alex Deucher233d1ad2012-12-04 15:25:59 -05004582 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
4583 radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
4584 radeon_ring_write(ring, next_rptr);
4585 }
4586
4587 /* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
4588 * Pad as necessary with NOPs.
4589 */
4590 while ((ring->wptr & 7) != 5)
Jerome Glisse0fcb6152013-01-14 11:32:27 -05004591 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
4592 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
Alex Deucher233d1ad2012-12-04 15:25:59 -05004593 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
4594 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
4595
4596}
4597
4598/**
4599 * evergreen_copy_dma - copy pages using the DMA engine
4600 *
4601 * @rdev: radeon_device pointer
4602 * @src_offset: src GPU address
4603 * @dst_offset: dst GPU address
4604 * @num_gpu_pages: number of GPU pages to xfer
4605 * @fence: radeon fence object
4606 *
4607 * Copy GPU paging using the DMA engine (evergreen-cayman).
4608 * Used by the radeon ttm implementation to move pages if
4609 * registered as the asic copy callback.
4610 */
4611int evergreen_copy_dma(struct radeon_device *rdev,
4612 uint64_t src_offset, uint64_t dst_offset,
4613 unsigned num_gpu_pages,
4614 struct radeon_fence **fence)
4615{
4616 struct radeon_semaphore *sem = NULL;
4617 int ring_index = rdev->asic->copy.dma_ring_index;
4618 struct radeon_ring *ring = &rdev->ring[ring_index];
4619 u32 size_in_dw, cur_size_in_dw;
4620 int i, num_loops;
4621 int r = 0;
4622
4623 r = radeon_semaphore_create(rdev, &sem);
4624 if (r) {
4625 DRM_ERROR("radeon: moving bo (%d).\n", r);
4626 return r;
4627 }
4628
4629 size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
4630 num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
4631 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
4632 if (r) {
4633 DRM_ERROR("radeon: moving bo (%d).\n", r);
4634 radeon_semaphore_free(rdev, &sem, NULL);
4635 return r;
4636 }
4637
4638 if (radeon_fence_need_sync(*fence, ring->idx)) {
4639 radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
4640 ring->idx);
4641 radeon_fence_note_sync(*fence, ring->idx);
4642 } else {
4643 radeon_semaphore_free(rdev, &sem, NULL);
4644 }
4645
4646 for (i = 0; i < num_loops; i++) {
4647 cur_size_in_dw = size_in_dw;
4648 if (cur_size_in_dw > 0xFFFFF)
4649 cur_size_in_dw = 0xFFFFF;
4650 size_in_dw -= cur_size_in_dw;
Jerome Glisse0fcb6152013-01-14 11:32:27 -05004651 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
Alex Deucher233d1ad2012-12-04 15:25:59 -05004652 radeon_ring_write(ring, dst_offset & 0xfffffffc);
4653 radeon_ring_write(ring, src_offset & 0xfffffffc);
4654 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
4655 radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
4656 src_offset += cur_size_in_dw * 4;
4657 dst_offset += cur_size_in_dw * 4;
4658 }
4659
4660 r = radeon_fence_emit(rdev, fence, ring->idx);
4661 if (r) {
4662 radeon_ring_unlock_undo(rdev, ring);
4663 return r;
4664 }
4665
4666 radeon_ring_unlock_commit(rdev, ring);
4667 radeon_semaphore_free(rdev, &sem, *fence);
4668
4669 return r;
4670}
4671
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004672static int evergreen_startup(struct radeon_device *rdev)
4673{
Christian Königf2ba57b2013-04-08 12:41:29 +02004674 struct radeon_ring *ring;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004675 int r;
4676
Alex Deucher9e46a482011-01-06 18:49:35 -05004677 /* enable pcie gen2 link */
Ilija Hadziccd540332011-09-20 10:22:57 -04004678 evergreen_pcie_gen2_enable(rdev);
Alex Deucher9e46a482011-01-06 18:49:35 -05004679
Alex Deucher0af62b02011-01-06 21:19:31 -05004680 if (ASIC_IS_DCE5(rdev)) {
4681 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
4682 r = ni_init_microcode(rdev);
4683 if (r) {
4684 DRM_ERROR("Failed to load firmware!\n");
4685 return r;
4686 }
4687 }
Alex Deucher755d8192011-03-02 20:07:34 -05004688 r = ni_mc_load_microcode(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004689 if (r) {
Alex Deucher0af62b02011-01-06 21:19:31 -05004690 DRM_ERROR("Failed to load MC firmware!\n");
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004691 return r;
4692 }
Alex Deucher0af62b02011-01-06 21:19:31 -05004693 } else {
4694 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
4695 r = r600_init_microcode(rdev);
4696 if (r) {
4697 DRM_ERROR("Failed to load firmware!\n");
4698 return r;
4699 }
4700 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004701 }
Alex Deucherfe251e22010-03-24 13:36:43 -04004702
Alex Deucher16cdf042011-10-28 10:30:02 -04004703 r = r600_vram_scratch_init(rdev);
4704 if (r)
4705 return r;
4706
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004707 evergreen_mc_program(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004708 if (rdev->flags & RADEON_IS_AGP) {
Alex Deucher0fcdb612010-03-24 13:20:41 -04004709 evergreen_agp_enable(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004710 } else {
4711 r = evergreen_pcie_gart_enable(rdev);
4712 if (r)
4713 return r;
4714 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004715 evergreen_gpu_init(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004716
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04004717 r = evergreen_blit_init(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004718 if (r) {
Ilija Hadzicfb3d9e92011-10-12 23:29:41 -04004719 r600_blit_fini(rdev);
Alex Deucher27cd7762012-02-23 17:53:42 -05004720 rdev->asic->copy.copy = NULL;
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04004721 dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004722 }
4723
Alex Deucher724c80e2010-08-27 18:25:25 -04004724 /* allocate wb buffer */
4725 r = radeon_wb_init(rdev);
4726 if (r)
4727 return r;
4728
Jerome Glisse30eb77f2011-11-20 20:45:34 +00004729 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
4730 if (r) {
4731 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
4732 return r;
4733 }
4734
Alex Deucher233d1ad2012-12-04 15:25:59 -05004735 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
4736 if (r) {
4737 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
4738 return r;
4739 }
4740
Christian Königf2ba57b2013-04-08 12:41:29 +02004741 r = rv770_uvd_resume(rdev);
4742 if (!r) {
4743 r = radeon_fence_driver_start_ring(rdev,
4744 R600_RING_TYPE_UVD_INDEX);
4745 if (r)
4746 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
4747 }
4748
4749 if (r)
4750 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
4751
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004752 /* Enable IRQ */
Adis Hamziće49f3952013-06-02 16:47:54 +02004753 if (!rdev->irq.installed) {
4754 r = radeon_irq_kms_init(rdev);
4755 if (r)
4756 return r;
4757 }
4758
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004759 r = r600_irq_init(rdev);
4760 if (r) {
4761 DRM_ERROR("radeon: IH init failed (%d).\n", r);
4762 radeon_irq_kms_fini(rdev);
4763 return r;
4764 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004765 evergreen_irq_set(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004766
Christian Königf2ba57b2013-04-08 12:41:29 +02004767 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
Christian Könige32eb502011-10-23 12:56:27 +02004768 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
Alex Deucher78c55602011-11-17 14:25:56 -05004769 R600_CP_RB_RPTR, R600_CP_RB_WPTR,
4770 0, 0xfffff, RADEON_CP_PACKET2);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004771 if (r)
4772 return r;
Alex Deucher233d1ad2012-12-04 15:25:59 -05004773
4774 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
4775 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
4776 DMA_RB_RPTR, DMA_RB_WPTR,
Jerome Glisse0fcb6152013-01-14 11:32:27 -05004777 2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
Alex Deucher233d1ad2012-12-04 15:25:59 -05004778 if (r)
4779 return r;
4780
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004781 r = evergreen_cp_load_microcode(rdev);
4782 if (r)
4783 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04004784 r = evergreen_cp_resume(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004785 if (r)
4786 return r;
Alex Deucher233d1ad2012-12-04 15:25:59 -05004787 r = r600_dma_resume(rdev);
4788 if (r)
4789 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04004790
Christian Königf2ba57b2013-04-08 12:41:29 +02004791 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
4792 if (ring->ring_size) {
4793 r = radeon_ring_init(rdev, ring, ring->ring_size,
4794 R600_WB_UVD_RPTR_OFFSET,
4795 UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
4796 0, 0xfffff, RADEON_CP_PACKET2);
4797 if (!r)
4798 r = r600_uvd_init(rdev);
4799
4800 if (r)
4801 DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
4802 }
4803
Christian König2898c342012-07-05 11:55:34 +02004804 r = radeon_ib_pool_init(rdev);
4805 if (r) {
4806 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
Jerome Glisseb15ba512011-11-15 11:48:34 -05004807 return r;
Christian König2898c342012-07-05 11:55:34 +02004808 }
Jerome Glisseb15ba512011-11-15 11:48:34 -05004809
Rafał Miłecki69d2ae52011-12-07 23:32:24 +01004810 r = r600_audio_init(rdev);
4811 if (r) {
4812 DRM_ERROR("radeon: audio init failed\n");
Jerome Glisseb15ba512011-11-15 11:48:34 -05004813 return r;
4814 }
4815
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004816 return 0;
4817}
4818
/**
 * evergreen_resume - resume the hardware after suspend
 *
 * @rdev: radeon_device pointer
 *
 * Resets the ASIC, re-posts the card via atombios, restores the
 * golden register settings and runs the common startup sequence.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	/* set before startup so the startup path sees acceleration enabled */
	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;

}
4849
/**
 * evergreen_suspend - quiesce the hardware for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Stops audio, UVD, the CP and DMA engines, suspends interrupts,
 * disables writeback and tears down the GART mapping.  The teardown
 * order mirrors the reverse of the startup sequence.
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	r600_uvd_rbc_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
4863
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004864/* Plan is to move initialization in that function and use
4865 * helper function so that radeon_device_init pretty much
4866 * do nothing more than calling asic specific function. This
4867 * should also allow to remove a bunch of callback function
4868 * like vram_info.
4869 */
4870int evergreen_init(struct radeon_device *rdev)
4871{
4872 int r;
4873
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004874 /* Read BIOS */
4875 if (!radeon_get_bios(rdev)) {
4876 if (ASIC_IS_AVIVO(rdev))
4877 return -EINVAL;
4878 }
4879 /* Must be an ATOMBIOS */
4880 if (!rdev->is_atom_bios) {
Alex Deucher755d8192011-03-02 20:07:34 -05004881 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004882 return -EINVAL;
4883 }
4884 r = radeon_atombios_init(rdev);
4885 if (r)
4886 return r;
Alex Deucher86f5c9e2010-12-20 12:35:04 -05004887 /* reset the asic, the gfx blocks are often in a bad state
4888 * after the driver is unloaded or after a resume
4889 */
4890 if (radeon_asic_reset(rdev))
4891 dev_warn(rdev->dev, "GPU reset failed !\n");
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004892 /* Post card if necessary */
Alex Deucherfd909c32011-01-11 18:08:59 -05004893 if (!radeon_card_posted(rdev)) {
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004894 if (!rdev->bios) {
4895 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
4896 return -EINVAL;
4897 }
4898 DRM_INFO("GPU not posted. posting now...\n");
4899 atom_asic_init(rdev->mode_info.atom_context);
4900 }
Alex Deucherd4788db2013-02-28 14:40:09 -05004901 /* init golden registers */
4902 evergreen_init_golden_registers(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004903 /* Initialize scratch registers */
4904 r600_scratch_init(rdev);
4905 /* Initialize surface registers */
4906 radeon_surface_init(rdev);
4907 /* Initialize clocks */
4908 radeon_get_clock_info(rdev->ddev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004909 /* Fence driver */
4910 r = radeon_fence_driver_init(rdev);
4911 if (r)
4912 return r;
Jerome Glissed594e462010-02-17 21:54:29 +00004913 /* initialize AGP */
4914 if (rdev->flags & RADEON_IS_AGP) {
4915 r = radeon_agp_init(rdev);
4916 if (r)
4917 radeon_agp_disable(rdev);
4918 }
4919 /* initialize memory controller */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004920 r = evergreen_mc_init(rdev);
4921 if (r)
4922 return r;
4923 /* Memory manager */
4924 r = radeon_bo_init(rdev);
4925 if (r)
4926 return r;
Alex Deucher45f9a392010-03-24 13:55:51 -04004927
Christian Könige32eb502011-10-23 12:56:27 +02004928 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
4929 r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004930
Alex Deucher233d1ad2012-12-04 15:25:59 -05004931 rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
4932 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
4933
Christian Königf2ba57b2013-04-08 12:41:29 +02004934 r = radeon_uvd_init(rdev);
4935 if (!r) {
4936 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
4937 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
4938 4096);
4939 }
4940
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004941 rdev->ih.ring_obj = NULL;
4942 r600_ih_ring_init(rdev, 64 * 1024);
4943
4944 r = r600_pcie_gart_init(rdev);
4945 if (r)
4946 return r;
Alex Deucher0fcdb612010-03-24 13:20:41 -04004947
Alex Deucher148a03b2010-06-03 19:00:03 -04004948 rdev->accel_working = true;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004949 r = evergreen_startup(rdev);
4950 if (r) {
Alex Deucherfe251e22010-03-24 13:36:43 -04004951 dev_err(rdev->dev, "disabling GPU acceleration\n");
4952 r700_cp_fini(rdev);
Alex Deucher233d1ad2012-12-04 15:25:59 -05004953 r600_dma_fini(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04004954 r600_irq_fini(rdev);
Alex Deucher724c80e2010-08-27 18:25:25 -04004955 radeon_wb_fini(rdev);
Christian König2898c342012-07-05 11:55:34 +02004956 radeon_ib_pool_fini(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04004957 radeon_irq_kms_fini(rdev);
Alex Deucher0fcdb612010-03-24 13:20:41 -04004958 evergreen_pcie_gart_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004959 rdev->accel_working = false;
4960 }
Alex Deucher77e00f22011-12-21 11:58:17 -05004961
4962 /* Don't start up if the MC ucode is missing on BTC parts.
4963 * The default clocks and voltages before the MC ucode
4964 * is loaded are not suffient for advanced operations.
4965 */
4966 if (ASIC_IS_DCE5(rdev)) {
4967 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
4968 DRM_ERROR("radeon: MC ucode required for NI+.\n");
4969 return -EINVAL;
4970 }
4971 }
4972
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05004973 return 0;
4974}
4975
/**
 * evergreen_fini - final driver teardown for evergreen ASICs
 *
 * @rdev: radeon_device pointer
 *
 * Tears down every subsystem brought up by evergreen_init/startup,
 * in roughly the reverse order of initialization, and releases the
 * cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r600_blit_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	/* kfree(NULL) is safe; clear the pointer to avoid dangling use */
	kfree(rdev->bios);
	rdev->bios = NULL;
}
Alex Deucher9e46a482011-01-06 18:49:35 -05004997
Ilija Hadzicb07759b2011-09-20 10:22:58 -04004998void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
Alex Deucher9e46a482011-01-06 18:49:35 -05004999{
Kleber Sacilotto de Souza7e0e4192013-05-03 19:43:13 -03005000 u32 link_width_cntl, speed_cntl;
Alex Deucher9e46a482011-01-06 18:49:35 -05005001
Alex Deucherd42dd572011-01-12 20:05:11 -05005002 if (radeon_pcie_gen2 == 0)
5003 return;
5004
Alex Deucher9e46a482011-01-06 18:49:35 -05005005 if (rdev->flags & RADEON_IS_IGP)
5006 return;
5007
5008 if (!(rdev->flags & RADEON_IS_PCIE))
5009 return;
5010
5011 /* x2 cards have a special sequence */
5012 if (ASIC_IS_X2(rdev))
5013 return;
5014
Kleber Sacilotto de Souza7e0e4192013-05-03 19:43:13 -03005015 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5016 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
Dave Airlie197bbb32012-06-27 08:35:54 +01005017 return;
5018
Alex Deucher492d2b62012-10-25 16:06:59 -04005019 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher3691fee2012-10-08 17:46:27 -04005020 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5021 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5022 return;
5023 }
5024
Dave Airlie197bbb32012-06-27 08:35:54 +01005025 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5026
Alex Deucher9e46a482011-01-06 18:49:35 -05005027 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5028 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5029
Alex Deucher492d2b62012-10-25 16:06:59 -04005030 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005031 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
Alex Deucher492d2b62012-10-25 16:06:59 -04005032 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005033
Alex Deucher492d2b62012-10-25 16:06:59 -04005034 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005035 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
Alex Deucher492d2b62012-10-25 16:06:59 -04005036 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005037
Alex Deucher492d2b62012-10-25 16:06:59 -04005038 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005039 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
Alex Deucher492d2b62012-10-25 16:06:59 -04005040 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005041
Alex Deucher492d2b62012-10-25 16:06:59 -04005042 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005043 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
Alex Deucher492d2b62012-10-25 16:06:59 -04005044 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005045
Alex Deucher492d2b62012-10-25 16:06:59 -04005046 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005047 speed_cntl |= LC_GEN2_EN_STRAP;
Alex Deucher492d2b62012-10-25 16:06:59 -04005048 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005049
5050 } else {
Alex Deucher492d2b62012-10-25 16:06:59 -04005051 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005052 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5053 if (1)
5054 link_width_cntl |= LC_UPCONFIGURE_DIS;
5055 else
5056 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
Alex Deucher492d2b62012-10-25 16:06:59 -04005057 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005058 }
5059}