/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24#include <linux/firmware.h>
25#include <linux/platform_device.h>
Tejun Heo5a0e3ad2010-03-24 17:04:11 +090026#include <linux/slab.h>
David Howells760285e2012-10-02 18:01:07 +010027#include <drm/drmP.h>
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050028#include "radeon.h"
Daniel Vettere6990372010-03-11 21:19:17 +000029#include "radeon_asic.h"
David Howells760285e2012-10-02 18:01:07 +010030#include <drm/radeon_drm.h>
Alex Deucher0fcdb612010-03-24 13:20:41 -040031#include "evergreend.h"
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050032#include "atom.h"
33#include "avivod.h"
34#include "evergreen_reg.h"
Alex Deucher2281a372010-10-21 13:31:38 -040035#include "evergreen_blit_shaders.h"
Alex Deucher138e4e12013-01-11 15:33:13 -050036#include "radeon_ucode.h"
Alex Deucherfe251e22010-03-24 13:36:43 -040037
Alex Deucher4a159032012-08-15 17:13:53 -040038static const u32 crtc_offsets[6] =
39{
40 EVERGREEN_CRTC0_REGISTER_OFFSET,
41 EVERGREEN_CRTC1_REGISTER_OFFSET,
42 EVERGREEN_CRTC2_REGISTER_OFFSET,
43 EVERGREEN_CRTC3_REGISTER_OFFSET,
44 EVERGREEN_CRTC4_REGISTER_OFFSET,
45 EVERGREEN_CRTC5_REGISTER_OFFSET
46};
47
Alex Deucher2948f5e2013-04-12 13:52:52 -040048#include "clearstate_evergreen.h"
49
Alex Deucher1fd11772013-04-17 17:53:50 -040050static const u32 sumo_rlc_save_restore_register_list[] =
Alex Deucher2948f5e2013-04-12 13:52:52 -040051{
52 0x98fc,
53 0x9830,
54 0x9834,
55 0x9838,
56 0x9870,
57 0x9874,
58 0x8a14,
59 0x8b24,
60 0x8bcc,
61 0x8b10,
62 0x8d00,
63 0x8d04,
64 0x8c00,
65 0x8c04,
66 0x8c08,
67 0x8c0c,
68 0x8d8c,
69 0x8c20,
70 0x8c24,
71 0x8c28,
72 0x8c18,
73 0x8c1c,
74 0x8cf0,
75 0x8e2c,
76 0x8e38,
77 0x8c30,
78 0x9508,
79 0x9688,
80 0x9608,
81 0x960c,
82 0x9610,
83 0x9614,
84 0x88c4,
85 0x88d4,
86 0xa008,
87 0x900c,
88 0x9100,
89 0x913c,
90 0x98f8,
91 0x98f4,
92 0x9b7c,
93 0x3f8c,
94 0x8950,
95 0x8954,
96 0x8a18,
97 0x8b28,
98 0x9144,
99 0x9148,
100 0x914c,
101 0x3f90,
102 0x3f94,
103 0x915c,
104 0x9160,
105 0x9178,
106 0x917c,
107 0x9180,
108 0x918c,
109 0x9190,
110 0x9194,
111 0x9198,
112 0x919c,
113 0x91a8,
114 0x91ac,
115 0x91b0,
116 0x91b4,
117 0x91b8,
118 0x91c4,
119 0x91c8,
120 0x91cc,
121 0x91d0,
122 0x91d4,
123 0x91e0,
124 0x91e4,
125 0x91ec,
126 0x91f0,
127 0x91f4,
128 0x9200,
129 0x9204,
130 0x929c,
131 0x9150,
132 0x802c,
133};
Alex Deucher2948f5e2013-04-12 13:52:52 -0400134
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500135static void evergreen_gpu_init(struct radeon_device *rdev);
136void evergreen_fini(struct radeon_device *rdev);
Ilija Hadzicb07759b2011-09-20 10:22:58 -0400137void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
Alex Deucherf52382d2013-02-15 11:02:50 -0500138void evergreen_program_aspm(struct radeon_device *rdev);
Alex Deucher1b370782011-11-17 20:13:28 -0500139extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
140 int ring, u32 cp_int_cntl);
Alex Deucher54e2e492013-06-13 18:26:25 -0400141extern void cayman_vm_decode_fault(struct radeon_device *rdev,
142 u32 status, u32 addr);
Alex Deucher22c775c2013-07-23 09:41:05 -0400143void cik_init_cp_pg_table(struct radeon_device *rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500144
Alex Deucher59a82d02013-08-13 12:48:06 -0400145extern u32 si_get_csb_size(struct radeon_device *rdev);
146extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
147
Alex Deucherd4788db2013-02-28 14:40:09 -0500148static const u32 evergreen_golden_registers[] =
149{
150 0x3f90, 0xffff0000, 0xff000000,
151 0x9148, 0xffff0000, 0xff000000,
152 0x3f94, 0xffff0000, 0xff000000,
153 0x914c, 0xffff0000, 0xff000000,
154 0x9b7c, 0xffffffff, 0x00000000,
155 0x8a14, 0xffffffff, 0x00000007,
156 0x8b10, 0xffffffff, 0x00000000,
157 0x960c, 0xffffffff, 0x54763210,
158 0x88c4, 0xffffffff, 0x000000c2,
159 0x88d4, 0xffffffff, 0x00000010,
160 0x8974, 0xffffffff, 0x00000000,
161 0xc78, 0x00000080, 0x00000080,
162 0x5eb4, 0xffffffff, 0x00000002,
163 0x5e78, 0xffffffff, 0x001000f0,
164 0x6104, 0x01000300, 0x00000000,
165 0x5bc0, 0x00300000, 0x00000000,
166 0x7030, 0xffffffff, 0x00000011,
167 0x7c30, 0xffffffff, 0x00000011,
168 0x10830, 0xffffffff, 0x00000011,
169 0x11430, 0xffffffff, 0x00000011,
170 0x12030, 0xffffffff, 0x00000011,
171 0x12c30, 0xffffffff, 0x00000011,
172 0xd02c, 0xffffffff, 0x08421000,
173 0x240c, 0xffffffff, 0x00000380,
174 0x8b24, 0xffffffff, 0x00ff0fff,
175 0x28a4c, 0x06000000, 0x06000000,
176 0x10c, 0x00000001, 0x00000001,
177 0x8d00, 0xffffffff, 0x100e4848,
178 0x8d04, 0xffffffff, 0x00164745,
179 0x8c00, 0xffffffff, 0xe4000003,
180 0x8c04, 0xffffffff, 0x40600060,
181 0x8c08, 0xffffffff, 0x001c001c,
182 0x8cf0, 0xffffffff, 0x08e00620,
183 0x8c20, 0xffffffff, 0x00800080,
184 0x8c24, 0xffffffff, 0x00800080,
185 0x8c18, 0xffffffff, 0x20202078,
186 0x8c1c, 0xffffffff, 0x00001010,
187 0x28350, 0xffffffff, 0x00000000,
188 0xa008, 0xffffffff, 0x00010000,
189 0x5cc, 0xffffffff, 0x00000001,
190 0x9508, 0xffffffff, 0x00000002,
191 0x913c, 0x0000000f, 0x0000000a
192};
193
194static const u32 evergreen_golden_registers2[] =
195{
196 0x2f4c, 0xffffffff, 0x00000000,
197 0x54f4, 0xffffffff, 0x00000000,
198 0x54f0, 0xffffffff, 0x00000000,
199 0x5498, 0xffffffff, 0x00000000,
200 0x549c, 0xffffffff, 0x00000000,
201 0x5494, 0xffffffff, 0x00000000,
202 0x53cc, 0xffffffff, 0x00000000,
203 0x53c8, 0xffffffff, 0x00000000,
204 0x53c4, 0xffffffff, 0x00000000,
205 0x53c0, 0xffffffff, 0x00000000,
206 0x53bc, 0xffffffff, 0x00000000,
207 0x53b8, 0xffffffff, 0x00000000,
208 0x53b4, 0xffffffff, 0x00000000,
209 0x53b0, 0xffffffff, 0x00000000
210};
211
212static const u32 cypress_mgcg_init[] =
213{
214 0x802c, 0xffffffff, 0xc0000000,
215 0x5448, 0xffffffff, 0x00000100,
216 0x55e4, 0xffffffff, 0x00000100,
217 0x160c, 0xffffffff, 0x00000100,
218 0x5644, 0xffffffff, 0x00000100,
219 0xc164, 0xffffffff, 0x00000100,
220 0x8a18, 0xffffffff, 0x00000100,
221 0x897c, 0xffffffff, 0x06000100,
222 0x8b28, 0xffffffff, 0x00000100,
223 0x9144, 0xffffffff, 0x00000100,
224 0x9a60, 0xffffffff, 0x00000100,
225 0x9868, 0xffffffff, 0x00000100,
226 0x8d58, 0xffffffff, 0x00000100,
227 0x9510, 0xffffffff, 0x00000100,
228 0x949c, 0xffffffff, 0x00000100,
229 0x9654, 0xffffffff, 0x00000100,
230 0x9030, 0xffffffff, 0x00000100,
231 0x9034, 0xffffffff, 0x00000100,
232 0x9038, 0xffffffff, 0x00000100,
233 0x903c, 0xffffffff, 0x00000100,
234 0x9040, 0xffffffff, 0x00000100,
235 0xa200, 0xffffffff, 0x00000100,
236 0xa204, 0xffffffff, 0x00000100,
237 0xa208, 0xffffffff, 0x00000100,
238 0xa20c, 0xffffffff, 0x00000100,
239 0x971c, 0xffffffff, 0x00000100,
240 0x977c, 0xffffffff, 0x00000100,
241 0x3f80, 0xffffffff, 0x00000100,
242 0xa210, 0xffffffff, 0x00000100,
243 0xa214, 0xffffffff, 0x00000100,
244 0x4d8, 0xffffffff, 0x00000100,
245 0x9784, 0xffffffff, 0x00000100,
246 0x9698, 0xffffffff, 0x00000100,
247 0x4d4, 0xffffffff, 0x00000200,
248 0x30cc, 0xffffffff, 0x00000100,
249 0xd0c0, 0xffffffff, 0xff000100,
250 0x802c, 0xffffffff, 0x40000000,
251 0x915c, 0xffffffff, 0x00010000,
252 0x9160, 0xffffffff, 0x00030002,
253 0x9178, 0xffffffff, 0x00070000,
254 0x917c, 0xffffffff, 0x00030002,
255 0x9180, 0xffffffff, 0x00050004,
256 0x918c, 0xffffffff, 0x00010006,
257 0x9190, 0xffffffff, 0x00090008,
258 0x9194, 0xffffffff, 0x00070000,
259 0x9198, 0xffffffff, 0x00030002,
260 0x919c, 0xffffffff, 0x00050004,
261 0x91a8, 0xffffffff, 0x00010006,
262 0x91ac, 0xffffffff, 0x00090008,
263 0x91b0, 0xffffffff, 0x00070000,
264 0x91b4, 0xffffffff, 0x00030002,
265 0x91b8, 0xffffffff, 0x00050004,
266 0x91c4, 0xffffffff, 0x00010006,
267 0x91c8, 0xffffffff, 0x00090008,
268 0x91cc, 0xffffffff, 0x00070000,
269 0x91d0, 0xffffffff, 0x00030002,
270 0x91d4, 0xffffffff, 0x00050004,
271 0x91e0, 0xffffffff, 0x00010006,
272 0x91e4, 0xffffffff, 0x00090008,
273 0x91e8, 0xffffffff, 0x00000000,
274 0x91ec, 0xffffffff, 0x00070000,
275 0x91f0, 0xffffffff, 0x00030002,
276 0x91f4, 0xffffffff, 0x00050004,
277 0x9200, 0xffffffff, 0x00010006,
278 0x9204, 0xffffffff, 0x00090008,
279 0x9208, 0xffffffff, 0x00070000,
280 0x920c, 0xffffffff, 0x00030002,
281 0x9210, 0xffffffff, 0x00050004,
282 0x921c, 0xffffffff, 0x00010006,
283 0x9220, 0xffffffff, 0x00090008,
284 0x9224, 0xffffffff, 0x00070000,
285 0x9228, 0xffffffff, 0x00030002,
286 0x922c, 0xffffffff, 0x00050004,
287 0x9238, 0xffffffff, 0x00010006,
288 0x923c, 0xffffffff, 0x00090008,
289 0x9240, 0xffffffff, 0x00070000,
290 0x9244, 0xffffffff, 0x00030002,
291 0x9248, 0xffffffff, 0x00050004,
292 0x9254, 0xffffffff, 0x00010006,
293 0x9258, 0xffffffff, 0x00090008,
294 0x925c, 0xffffffff, 0x00070000,
295 0x9260, 0xffffffff, 0x00030002,
296 0x9264, 0xffffffff, 0x00050004,
297 0x9270, 0xffffffff, 0x00010006,
298 0x9274, 0xffffffff, 0x00090008,
299 0x9278, 0xffffffff, 0x00070000,
300 0x927c, 0xffffffff, 0x00030002,
301 0x9280, 0xffffffff, 0x00050004,
302 0x928c, 0xffffffff, 0x00010006,
303 0x9290, 0xffffffff, 0x00090008,
304 0x9294, 0xffffffff, 0x00000000,
305 0x929c, 0xffffffff, 0x00000001,
306 0x802c, 0xffffffff, 0x40010000,
307 0x915c, 0xffffffff, 0x00010000,
308 0x9160, 0xffffffff, 0x00030002,
309 0x9178, 0xffffffff, 0x00070000,
310 0x917c, 0xffffffff, 0x00030002,
311 0x9180, 0xffffffff, 0x00050004,
312 0x918c, 0xffffffff, 0x00010006,
313 0x9190, 0xffffffff, 0x00090008,
314 0x9194, 0xffffffff, 0x00070000,
315 0x9198, 0xffffffff, 0x00030002,
316 0x919c, 0xffffffff, 0x00050004,
317 0x91a8, 0xffffffff, 0x00010006,
318 0x91ac, 0xffffffff, 0x00090008,
319 0x91b0, 0xffffffff, 0x00070000,
320 0x91b4, 0xffffffff, 0x00030002,
321 0x91b8, 0xffffffff, 0x00050004,
322 0x91c4, 0xffffffff, 0x00010006,
323 0x91c8, 0xffffffff, 0x00090008,
324 0x91cc, 0xffffffff, 0x00070000,
325 0x91d0, 0xffffffff, 0x00030002,
326 0x91d4, 0xffffffff, 0x00050004,
327 0x91e0, 0xffffffff, 0x00010006,
328 0x91e4, 0xffffffff, 0x00090008,
329 0x91e8, 0xffffffff, 0x00000000,
330 0x91ec, 0xffffffff, 0x00070000,
331 0x91f0, 0xffffffff, 0x00030002,
332 0x91f4, 0xffffffff, 0x00050004,
333 0x9200, 0xffffffff, 0x00010006,
334 0x9204, 0xffffffff, 0x00090008,
335 0x9208, 0xffffffff, 0x00070000,
336 0x920c, 0xffffffff, 0x00030002,
337 0x9210, 0xffffffff, 0x00050004,
338 0x921c, 0xffffffff, 0x00010006,
339 0x9220, 0xffffffff, 0x00090008,
340 0x9224, 0xffffffff, 0x00070000,
341 0x9228, 0xffffffff, 0x00030002,
342 0x922c, 0xffffffff, 0x00050004,
343 0x9238, 0xffffffff, 0x00010006,
344 0x923c, 0xffffffff, 0x00090008,
345 0x9240, 0xffffffff, 0x00070000,
346 0x9244, 0xffffffff, 0x00030002,
347 0x9248, 0xffffffff, 0x00050004,
348 0x9254, 0xffffffff, 0x00010006,
349 0x9258, 0xffffffff, 0x00090008,
350 0x925c, 0xffffffff, 0x00070000,
351 0x9260, 0xffffffff, 0x00030002,
352 0x9264, 0xffffffff, 0x00050004,
353 0x9270, 0xffffffff, 0x00010006,
354 0x9274, 0xffffffff, 0x00090008,
355 0x9278, 0xffffffff, 0x00070000,
356 0x927c, 0xffffffff, 0x00030002,
357 0x9280, 0xffffffff, 0x00050004,
358 0x928c, 0xffffffff, 0x00010006,
359 0x9290, 0xffffffff, 0x00090008,
360 0x9294, 0xffffffff, 0x00000000,
361 0x929c, 0xffffffff, 0x00000001,
362 0x802c, 0xffffffff, 0xc0000000
363};
364
365static const u32 redwood_mgcg_init[] =
366{
367 0x802c, 0xffffffff, 0xc0000000,
368 0x5448, 0xffffffff, 0x00000100,
369 0x55e4, 0xffffffff, 0x00000100,
370 0x160c, 0xffffffff, 0x00000100,
371 0x5644, 0xffffffff, 0x00000100,
372 0xc164, 0xffffffff, 0x00000100,
373 0x8a18, 0xffffffff, 0x00000100,
374 0x897c, 0xffffffff, 0x06000100,
375 0x8b28, 0xffffffff, 0x00000100,
376 0x9144, 0xffffffff, 0x00000100,
377 0x9a60, 0xffffffff, 0x00000100,
378 0x9868, 0xffffffff, 0x00000100,
379 0x8d58, 0xffffffff, 0x00000100,
380 0x9510, 0xffffffff, 0x00000100,
381 0x949c, 0xffffffff, 0x00000100,
382 0x9654, 0xffffffff, 0x00000100,
383 0x9030, 0xffffffff, 0x00000100,
384 0x9034, 0xffffffff, 0x00000100,
385 0x9038, 0xffffffff, 0x00000100,
386 0x903c, 0xffffffff, 0x00000100,
387 0x9040, 0xffffffff, 0x00000100,
388 0xa200, 0xffffffff, 0x00000100,
389 0xa204, 0xffffffff, 0x00000100,
390 0xa208, 0xffffffff, 0x00000100,
391 0xa20c, 0xffffffff, 0x00000100,
392 0x971c, 0xffffffff, 0x00000100,
393 0x977c, 0xffffffff, 0x00000100,
394 0x3f80, 0xffffffff, 0x00000100,
395 0xa210, 0xffffffff, 0x00000100,
396 0xa214, 0xffffffff, 0x00000100,
397 0x4d8, 0xffffffff, 0x00000100,
398 0x9784, 0xffffffff, 0x00000100,
399 0x9698, 0xffffffff, 0x00000100,
400 0x4d4, 0xffffffff, 0x00000200,
401 0x30cc, 0xffffffff, 0x00000100,
402 0xd0c0, 0xffffffff, 0xff000100,
403 0x802c, 0xffffffff, 0x40000000,
404 0x915c, 0xffffffff, 0x00010000,
405 0x9160, 0xffffffff, 0x00030002,
406 0x9178, 0xffffffff, 0x00070000,
407 0x917c, 0xffffffff, 0x00030002,
408 0x9180, 0xffffffff, 0x00050004,
409 0x918c, 0xffffffff, 0x00010006,
410 0x9190, 0xffffffff, 0x00090008,
411 0x9194, 0xffffffff, 0x00070000,
412 0x9198, 0xffffffff, 0x00030002,
413 0x919c, 0xffffffff, 0x00050004,
414 0x91a8, 0xffffffff, 0x00010006,
415 0x91ac, 0xffffffff, 0x00090008,
416 0x91b0, 0xffffffff, 0x00070000,
417 0x91b4, 0xffffffff, 0x00030002,
418 0x91b8, 0xffffffff, 0x00050004,
419 0x91c4, 0xffffffff, 0x00010006,
420 0x91c8, 0xffffffff, 0x00090008,
421 0x91cc, 0xffffffff, 0x00070000,
422 0x91d0, 0xffffffff, 0x00030002,
423 0x91d4, 0xffffffff, 0x00050004,
424 0x91e0, 0xffffffff, 0x00010006,
425 0x91e4, 0xffffffff, 0x00090008,
426 0x91e8, 0xffffffff, 0x00000000,
427 0x91ec, 0xffffffff, 0x00070000,
428 0x91f0, 0xffffffff, 0x00030002,
429 0x91f4, 0xffffffff, 0x00050004,
430 0x9200, 0xffffffff, 0x00010006,
431 0x9204, 0xffffffff, 0x00090008,
432 0x9294, 0xffffffff, 0x00000000,
433 0x929c, 0xffffffff, 0x00000001,
434 0x802c, 0xffffffff, 0xc0000000
435};
436
437static const u32 cedar_golden_registers[] =
438{
439 0x3f90, 0xffff0000, 0xff000000,
440 0x9148, 0xffff0000, 0xff000000,
441 0x3f94, 0xffff0000, 0xff000000,
442 0x914c, 0xffff0000, 0xff000000,
443 0x9b7c, 0xffffffff, 0x00000000,
444 0x8a14, 0xffffffff, 0x00000007,
445 0x8b10, 0xffffffff, 0x00000000,
446 0x960c, 0xffffffff, 0x54763210,
447 0x88c4, 0xffffffff, 0x000000c2,
448 0x88d4, 0xffffffff, 0x00000000,
449 0x8974, 0xffffffff, 0x00000000,
450 0xc78, 0x00000080, 0x00000080,
451 0x5eb4, 0xffffffff, 0x00000002,
452 0x5e78, 0xffffffff, 0x001000f0,
453 0x6104, 0x01000300, 0x00000000,
454 0x5bc0, 0x00300000, 0x00000000,
455 0x7030, 0xffffffff, 0x00000011,
456 0x7c30, 0xffffffff, 0x00000011,
457 0x10830, 0xffffffff, 0x00000011,
458 0x11430, 0xffffffff, 0x00000011,
459 0xd02c, 0xffffffff, 0x08421000,
460 0x240c, 0xffffffff, 0x00000380,
461 0x8b24, 0xffffffff, 0x00ff0fff,
462 0x28a4c, 0x06000000, 0x06000000,
463 0x10c, 0x00000001, 0x00000001,
464 0x8d00, 0xffffffff, 0x100e4848,
465 0x8d04, 0xffffffff, 0x00164745,
466 0x8c00, 0xffffffff, 0xe4000003,
467 0x8c04, 0xffffffff, 0x40600060,
468 0x8c08, 0xffffffff, 0x001c001c,
469 0x8cf0, 0xffffffff, 0x08e00410,
470 0x8c20, 0xffffffff, 0x00800080,
471 0x8c24, 0xffffffff, 0x00800080,
472 0x8c18, 0xffffffff, 0x20202078,
473 0x8c1c, 0xffffffff, 0x00001010,
474 0x28350, 0xffffffff, 0x00000000,
475 0xa008, 0xffffffff, 0x00010000,
476 0x5cc, 0xffffffff, 0x00000001,
477 0x9508, 0xffffffff, 0x00000002
478};
479
480static const u32 cedar_mgcg_init[] =
481{
482 0x802c, 0xffffffff, 0xc0000000,
483 0x5448, 0xffffffff, 0x00000100,
484 0x55e4, 0xffffffff, 0x00000100,
485 0x160c, 0xffffffff, 0x00000100,
486 0x5644, 0xffffffff, 0x00000100,
487 0xc164, 0xffffffff, 0x00000100,
488 0x8a18, 0xffffffff, 0x00000100,
489 0x897c, 0xffffffff, 0x06000100,
490 0x8b28, 0xffffffff, 0x00000100,
491 0x9144, 0xffffffff, 0x00000100,
492 0x9a60, 0xffffffff, 0x00000100,
493 0x9868, 0xffffffff, 0x00000100,
494 0x8d58, 0xffffffff, 0x00000100,
495 0x9510, 0xffffffff, 0x00000100,
496 0x949c, 0xffffffff, 0x00000100,
497 0x9654, 0xffffffff, 0x00000100,
498 0x9030, 0xffffffff, 0x00000100,
499 0x9034, 0xffffffff, 0x00000100,
500 0x9038, 0xffffffff, 0x00000100,
501 0x903c, 0xffffffff, 0x00000100,
502 0x9040, 0xffffffff, 0x00000100,
503 0xa200, 0xffffffff, 0x00000100,
504 0xa204, 0xffffffff, 0x00000100,
505 0xa208, 0xffffffff, 0x00000100,
506 0xa20c, 0xffffffff, 0x00000100,
507 0x971c, 0xffffffff, 0x00000100,
508 0x977c, 0xffffffff, 0x00000100,
509 0x3f80, 0xffffffff, 0x00000100,
510 0xa210, 0xffffffff, 0x00000100,
511 0xa214, 0xffffffff, 0x00000100,
512 0x4d8, 0xffffffff, 0x00000100,
513 0x9784, 0xffffffff, 0x00000100,
514 0x9698, 0xffffffff, 0x00000100,
515 0x4d4, 0xffffffff, 0x00000200,
516 0x30cc, 0xffffffff, 0x00000100,
517 0xd0c0, 0xffffffff, 0xff000100,
518 0x802c, 0xffffffff, 0x40000000,
519 0x915c, 0xffffffff, 0x00010000,
520 0x9178, 0xffffffff, 0x00050000,
521 0x917c, 0xffffffff, 0x00030002,
522 0x918c, 0xffffffff, 0x00010004,
523 0x9190, 0xffffffff, 0x00070006,
524 0x9194, 0xffffffff, 0x00050000,
525 0x9198, 0xffffffff, 0x00030002,
526 0x91a8, 0xffffffff, 0x00010004,
527 0x91ac, 0xffffffff, 0x00070006,
528 0x91e8, 0xffffffff, 0x00000000,
529 0x9294, 0xffffffff, 0x00000000,
530 0x929c, 0xffffffff, 0x00000001,
531 0x802c, 0xffffffff, 0xc0000000
532};
533
534static const u32 juniper_mgcg_init[] =
535{
536 0x802c, 0xffffffff, 0xc0000000,
537 0x5448, 0xffffffff, 0x00000100,
538 0x55e4, 0xffffffff, 0x00000100,
539 0x160c, 0xffffffff, 0x00000100,
540 0x5644, 0xffffffff, 0x00000100,
541 0xc164, 0xffffffff, 0x00000100,
542 0x8a18, 0xffffffff, 0x00000100,
543 0x897c, 0xffffffff, 0x06000100,
544 0x8b28, 0xffffffff, 0x00000100,
545 0x9144, 0xffffffff, 0x00000100,
546 0x9a60, 0xffffffff, 0x00000100,
547 0x9868, 0xffffffff, 0x00000100,
548 0x8d58, 0xffffffff, 0x00000100,
549 0x9510, 0xffffffff, 0x00000100,
550 0x949c, 0xffffffff, 0x00000100,
551 0x9654, 0xffffffff, 0x00000100,
552 0x9030, 0xffffffff, 0x00000100,
553 0x9034, 0xffffffff, 0x00000100,
554 0x9038, 0xffffffff, 0x00000100,
555 0x903c, 0xffffffff, 0x00000100,
556 0x9040, 0xffffffff, 0x00000100,
557 0xa200, 0xffffffff, 0x00000100,
558 0xa204, 0xffffffff, 0x00000100,
559 0xa208, 0xffffffff, 0x00000100,
560 0xa20c, 0xffffffff, 0x00000100,
561 0x971c, 0xffffffff, 0x00000100,
562 0xd0c0, 0xffffffff, 0xff000100,
563 0x802c, 0xffffffff, 0x40000000,
564 0x915c, 0xffffffff, 0x00010000,
565 0x9160, 0xffffffff, 0x00030002,
566 0x9178, 0xffffffff, 0x00070000,
567 0x917c, 0xffffffff, 0x00030002,
568 0x9180, 0xffffffff, 0x00050004,
569 0x918c, 0xffffffff, 0x00010006,
570 0x9190, 0xffffffff, 0x00090008,
571 0x9194, 0xffffffff, 0x00070000,
572 0x9198, 0xffffffff, 0x00030002,
573 0x919c, 0xffffffff, 0x00050004,
574 0x91a8, 0xffffffff, 0x00010006,
575 0x91ac, 0xffffffff, 0x00090008,
576 0x91b0, 0xffffffff, 0x00070000,
577 0x91b4, 0xffffffff, 0x00030002,
578 0x91b8, 0xffffffff, 0x00050004,
579 0x91c4, 0xffffffff, 0x00010006,
580 0x91c8, 0xffffffff, 0x00090008,
581 0x91cc, 0xffffffff, 0x00070000,
582 0x91d0, 0xffffffff, 0x00030002,
583 0x91d4, 0xffffffff, 0x00050004,
584 0x91e0, 0xffffffff, 0x00010006,
585 0x91e4, 0xffffffff, 0x00090008,
586 0x91e8, 0xffffffff, 0x00000000,
587 0x91ec, 0xffffffff, 0x00070000,
588 0x91f0, 0xffffffff, 0x00030002,
589 0x91f4, 0xffffffff, 0x00050004,
590 0x9200, 0xffffffff, 0x00010006,
591 0x9204, 0xffffffff, 0x00090008,
592 0x9208, 0xffffffff, 0x00070000,
593 0x920c, 0xffffffff, 0x00030002,
594 0x9210, 0xffffffff, 0x00050004,
595 0x921c, 0xffffffff, 0x00010006,
596 0x9220, 0xffffffff, 0x00090008,
597 0x9224, 0xffffffff, 0x00070000,
598 0x9228, 0xffffffff, 0x00030002,
599 0x922c, 0xffffffff, 0x00050004,
600 0x9238, 0xffffffff, 0x00010006,
601 0x923c, 0xffffffff, 0x00090008,
602 0x9240, 0xffffffff, 0x00070000,
603 0x9244, 0xffffffff, 0x00030002,
604 0x9248, 0xffffffff, 0x00050004,
605 0x9254, 0xffffffff, 0x00010006,
606 0x9258, 0xffffffff, 0x00090008,
607 0x925c, 0xffffffff, 0x00070000,
608 0x9260, 0xffffffff, 0x00030002,
609 0x9264, 0xffffffff, 0x00050004,
610 0x9270, 0xffffffff, 0x00010006,
611 0x9274, 0xffffffff, 0x00090008,
612 0x9278, 0xffffffff, 0x00070000,
613 0x927c, 0xffffffff, 0x00030002,
614 0x9280, 0xffffffff, 0x00050004,
615 0x928c, 0xffffffff, 0x00010006,
616 0x9290, 0xffffffff, 0x00090008,
617 0x9294, 0xffffffff, 0x00000000,
618 0x929c, 0xffffffff, 0x00000001,
619 0x802c, 0xffffffff, 0xc0000000,
620 0x977c, 0xffffffff, 0x00000100,
621 0x3f80, 0xffffffff, 0x00000100,
622 0xa210, 0xffffffff, 0x00000100,
623 0xa214, 0xffffffff, 0x00000100,
624 0x4d8, 0xffffffff, 0x00000100,
625 0x9784, 0xffffffff, 0x00000100,
626 0x9698, 0xffffffff, 0x00000100,
627 0x4d4, 0xffffffff, 0x00000200,
628 0x30cc, 0xffffffff, 0x00000100,
629 0x802c, 0xffffffff, 0xc0000000
630};
631
632static const u32 supersumo_golden_registers[] =
633{
634 0x5eb4, 0xffffffff, 0x00000002,
635 0x5cc, 0xffffffff, 0x00000001,
636 0x7030, 0xffffffff, 0x00000011,
637 0x7c30, 0xffffffff, 0x00000011,
638 0x6104, 0x01000300, 0x00000000,
639 0x5bc0, 0x00300000, 0x00000000,
640 0x8c04, 0xffffffff, 0x40600060,
641 0x8c08, 0xffffffff, 0x001c001c,
642 0x8c20, 0xffffffff, 0x00800080,
643 0x8c24, 0xffffffff, 0x00800080,
644 0x8c18, 0xffffffff, 0x20202078,
645 0x8c1c, 0xffffffff, 0x00001010,
646 0x918c, 0xffffffff, 0x00010006,
647 0x91a8, 0xffffffff, 0x00010006,
648 0x91c4, 0xffffffff, 0x00010006,
649 0x91e0, 0xffffffff, 0x00010006,
650 0x9200, 0xffffffff, 0x00010006,
651 0x9150, 0xffffffff, 0x6e944040,
652 0x917c, 0xffffffff, 0x00030002,
653 0x9180, 0xffffffff, 0x00050004,
654 0x9198, 0xffffffff, 0x00030002,
655 0x919c, 0xffffffff, 0x00050004,
656 0x91b4, 0xffffffff, 0x00030002,
657 0x91b8, 0xffffffff, 0x00050004,
658 0x91d0, 0xffffffff, 0x00030002,
659 0x91d4, 0xffffffff, 0x00050004,
660 0x91f0, 0xffffffff, 0x00030002,
661 0x91f4, 0xffffffff, 0x00050004,
662 0x915c, 0xffffffff, 0x00010000,
663 0x9160, 0xffffffff, 0x00030002,
664 0x3f90, 0xffff0000, 0xff000000,
665 0x9178, 0xffffffff, 0x00070000,
666 0x9194, 0xffffffff, 0x00070000,
667 0x91b0, 0xffffffff, 0x00070000,
668 0x91cc, 0xffffffff, 0x00070000,
669 0x91ec, 0xffffffff, 0x00070000,
670 0x9148, 0xffff0000, 0xff000000,
671 0x9190, 0xffffffff, 0x00090008,
672 0x91ac, 0xffffffff, 0x00090008,
673 0x91c8, 0xffffffff, 0x00090008,
674 0x91e4, 0xffffffff, 0x00090008,
675 0x9204, 0xffffffff, 0x00090008,
676 0x3f94, 0xffff0000, 0xff000000,
677 0x914c, 0xffff0000, 0xff000000,
678 0x929c, 0xffffffff, 0x00000001,
679 0x8a18, 0xffffffff, 0x00000100,
680 0x8b28, 0xffffffff, 0x00000100,
681 0x9144, 0xffffffff, 0x00000100,
682 0x5644, 0xffffffff, 0x00000100,
683 0x9b7c, 0xffffffff, 0x00000000,
684 0x8030, 0xffffffff, 0x0000100a,
685 0x8a14, 0xffffffff, 0x00000007,
686 0x8b24, 0xffffffff, 0x00ff0fff,
687 0x8b10, 0xffffffff, 0x00000000,
688 0x28a4c, 0x06000000, 0x06000000,
689 0x4d8, 0xffffffff, 0x00000100,
690 0x913c, 0xffff000f, 0x0100000a,
691 0x960c, 0xffffffff, 0x54763210,
692 0x88c4, 0xffffffff, 0x000000c2,
693 0x88d4, 0xffffffff, 0x00000010,
694 0x8974, 0xffffffff, 0x00000000,
695 0xc78, 0x00000080, 0x00000080,
696 0x5e78, 0xffffffff, 0x001000f0,
697 0xd02c, 0xffffffff, 0x08421000,
698 0xa008, 0xffffffff, 0x00010000,
699 0x8d00, 0xffffffff, 0x100e4848,
700 0x8d04, 0xffffffff, 0x00164745,
701 0x8c00, 0xffffffff, 0xe4000003,
702 0x8cf0, 0x1fffffff, 0x08e00620,
703 0x28350, 0xffffffff, 0x00000000,
704 0x9508, 0xffffffff, 0x00000002
705};
706
707static const u32 sumo_golden_registers[] =
708{
709 0x900c, 0x00ffffff, 0x0017071f,
710 0x8c18, 0xffffffff, 0x10101060,
711 0x8c1c, 0xffffffff, 0x00001010,
712 0x8c30, 0x0000000f, 0x00000005,
713 0x9688, 0x0000000f, 0x00000007
714};
715
716static const u32 wrestler_golden_registers[] =
717{
718 0x5eb4, 0xffffffff, 0x00000002,
719 0x5cc, 0xffffffff, 0x00000001,
720 0x7030, 0xffffffff, 0x00000011,
721 0x7c30, 0xffffffff, 0x00000011,
722 0x6104, 0x01000300, 0x00000000,
723 0x5bc0, 0x00300000, 0x00000000,
724 0x918c, 0xffffffff, 0x00010006,
725 0x91a8, 0xffffffff, 0x00010006,
726 0x9150, 0xffffffff, 0x6e944040,
727 0x917c, 0xffffffff, 0x00030002,
728 0x9198, 0xffffffff, 0x00030002,
729 0x915c, 0xffffffff, 0x00010000,
730 0x3f90, 0xffff0000, 0xff000000,
731 0x9178, 0xffffffff, 0x00070000,
732 0x9194, 0xffffffff, 0x00070000,
733 0x9148, 0xffff0000, 0xff000000,
734 0x9190, 0xffffffff, 0x00090008,
735 0x91ac, 0xffffffff, 0x00090008,
736 0x3f94, 0xffff0000, 0xff000000,
737 0x914c, 0xffff0000, 0xff000000,
738 0x929c, 0xffffffff, 0x00000001,
739 0x8a18, 0xffffffff, 0x00000100,
740 0x8b28, 0xffffffff, 0x00000100,
741 0x9144, 0xffffffff, 0x00000100,
742 0x9b7c, 0xffffffff, 0x00000000,
743 0x8030, 0xffffffff, 0x0000100a,
744 0x8a14, 0xffffffff, 0x00000001,
745 0x8b24, 0xffffffff, 0x00ff0fff,
746 0x8b10, 0xffffffff, 0x00000000,
747 0x28a4c, 0x06000000, 0x06000000,
748 0x4d8, 0xffffffff, 0x00000100,
749 0x913c, 0xffff000f, 0x0100000a,
750 0x960c, 0xffffffff, 0x54763210,
751 0x88c4, 0xffffffff, 0x000000c2,
752 0x88d4, 0xffffffff, 0x00000010,
753 0x8974, 0xffffffff, 0x00000000,
754 0xc78, 0x00000080, 0x00000080,
755 0x5e78, 0xffffffff, 0x001000f0,
756 0xd02c, 0xffffffff, 0x08421000,
757 0xa008, 0xffffffff, 0x00010000,
758 0x8d00, 0xffffffff, 0x100e4848,
759 0x8d04, 0xffffffff, 0x00164745,
760 0x8c00, 0xffffffff, 0xe4000003,
761 0x8cf0, 0x1fffffff, 0x08e00410,
762 0x28350, 0xffffffff, 0x00000000,
763 0x9508, 0xffffffff, 0x00000002,
764 0x900c, 0xffffffff, 0x0017071f,
765 0x8c18, 0xffffffff, 0x10101060,
766 0x8c1c, 0xffffffff, 0x00001010
767};
768
769static const u32 barts_golden_registers[] =
770{
771 0x5eb4, 0xffffffff, 0x00000002,
772 0x5e78, 0x8f311ff1, 0x001000f0,
773 0x3f90, 0xffff0000, 0xff000000,
774 0x9148, 0xffff0000, 0xff000000,
775 0x3f94, 0xffff0000, 0xff000000,
776 0x914c, 0xffff0000, 0xff000000,
777 0xc78, 0x00000080, 0x00000080,
778 0xbd4, 0x70073777, 0x00010001,
779 0xd02c, 0xbfffff1f, 0x08421000,
780 0xd0b8, 0x03773777, 0x02011003,
781 0x5bc0, 0x00200000, 0x50100000,
782 0x98f8, 0x33773777, 0x02011003,
783 0x98fc, 0xffffffff, 0x76543210,
784 0x7030, 0x31000311, 0x00000011,
785 0x2f48, 0x00000007, 0x02011003,
786 0x6b28, 0x00000010, 0x00000012,
787 0x7728, 0x00000010, 0x00000012,
788 0x10328, 0x00000010, 0x00000012,
789 0x10f28, 0x00000010, 0x00000012,
790 0x11b28, 0x00000010, 0x00000012,
791 0x12728, 0x00000010, 0x00000012,
792 0x240c, 0x000007ff, 0x00000380,
793 0x8a14, 0xf000001f, 0x00000007,
794 0x8b24, 0x3fff3fff, 0x00ff0fff,
795 0x8b10, 0x0000ff0f, 0x00000000,
796 0x28a4c, 0x07ffffff, 0x06000000,
797 0x10c, 0x00000001, 0x00010003,
798 0xa02c, 0xffffffff, 0x0000009b,
799 0x913c, 0x0000000f, 0x0100000a,
800 0x8d00, 0xffff7f7f, 0x100e4848,
801 0x8d04, 0x00ffffff, 0x00164745,
802 0x8c00, 0xfffc0003, 0xe4000003,
803 0x8c04, 0xf8ff00ff, 0x40600060,
804 0x8c08, 0x00ff00ff, 0x001c001c,
805 0x8cf0, 0x1fff1fff, 0x08e00620,
806 0x8c20, 0x0fff0fff, 0x00800080,
807 0x8c24, 0x0fff0fff, 0x00800080,
808 0x8c18, 0xffffffff, 0x20202078,
809 0x8c1c, 0x0000ffff, 0x00001010,
810 0x28350, 0x00000f01, 0x00000000,
811 0x9508, 0x3700001f, 0x00000002,
812 0x960c, 0xffffffff, 0x54763210,
813 0x88c4, 0x001f3ae3, 0x000000c2,
814 0x88d4, 0x0000001f, 0x00000010,
815 0x8974, 0xffffffff, 0x00000000
816};
817
818static const u32 turks_golden_registers[] =
819{
820 0x5eb4, 0xffffffff, 0x00000002,
821 0x5e78, 0x8f311ff1, 0x001000f0,
822 0x8c8, 0x00003000, 0x00001070,
823 0x8cc, 0x000fffff, 0x00040035,
824 0x3f90, 0xffff0000, 0xfff00000,
825 0x9148, 0xffff0000, 0xfff00000,
826 0x3f94, 0xffff0000, 0xfff00000,
827 0x914c, 0xffff0000, 0xfff00000,
828 0xc78, 0x00000080, 0x00000080,
829 0xbd4, 0x00073007, 0x00010002,
830 0xd02c, 0xbfffff1f, 0x08421000,
831 0xd0b8, 0x03773777, 0x02010002,
832 0x5bc0, 0x00200000, 0x50100000,
833 0x98f8, 0x33773777, 0x00010002,
834 0x98fc, 0xffffffff, 0x33221100,
835 0x7030, 0x31000311, 0x00000011,
836 0x2f48, 0x33773777, 0x00010002,
837 0x6b28, 0x00000010, 0x00000012,
838 0x7728, 0x00000010, 0x00000012,
839 0x10328, 0x00000010, 0x00000012,
840 0x10f28, 0x00000010, 0x00000012,
841 0x11b28, 0x00000010, 0x00000012,
842 0x12728, 0x00000010, 0x00000012,
843 0x240c, 0x000007ff, 0x00000380,
844 0x8a14, 0xf000001f, 0x00000007,
845 0x8b24, 0x3fff3fff, 0x00ff0fff,
846 0x8b10, 0x0000ff0f, 0x00000000,
847 0x28a4c, 0x07ffffff, 0x06000000,
848 0x10c, 0x00000001, 0x00010003,
849 0xa02c, 0xffffffff, 0x0000009b,
850 0x913c, 0x0000000f, 0x0100000a,
851 0x8d00, 0xffff7f7f, 0x100e4848,
852 0x8d04, 0x00ffffff, 0x00164745,
853 0x8c00, 0xfffc0003, 0xe4000003,
854 0x8c04, 0xf8ff00ff, 0x40600060,
855 0x8c08, 0x00ff00ff, 0x001c001c,
856 0x8cf0, 0x1fff1fff, 0x08e00410,
857 0x8c20, 0x0fff0fff, 0x00800080,
858 0x8c24, 0x0fff0fff, 0x00800080,
859 0x8c18, 0xffffffff, 0x20202078,
860 0x8c1c, 0x0000ffff, 0x00001010,
861 0x28350, 0x00000f01, 0x00000000,
862 0x9508, 0x3700001f, 0x00000002,
863 0x960c, 0xffffffff, 0x54763210,
864 0x88c4, 0x001f3ae3, 0x000000c2,
865 0x88d4, 0x0000001f, 0x00000010,
866 0x8974, 0xffffffff, 0x00000000
867};
868
869static const u32 caicos_golden_registers[] =
870{
871 0x5eb4, 0xffffffff, 0x00000002,
872 0x5e78, 0x8f311ff1, 0x001000f0,
873 0x8c8, 0x00003420, 0x00001450,
874 0x8cc, 0x000fffff, 0x00040035,
875 0x3f90, 0xffff0000, 0xfffc0000,
876 0x9148, 0xffff0000, 0xfffc0000,
877 0x3f94, 0xffff0000, 0xfffc0000,
878 0x914c, 0xffff0000, 0xfffc0000,
879 0xc78, 0x00000080, 0x00000080,
880 0xbd4, 0x00073007, 0x00010001,
881 0xd02c, 0xbfffff1f, 0x08421000,
882 0xd0b8, 0x03773777, 0x02010001,
883 0x5bc0, 0x00200000, 0x50100000,
884 0x98f8, 0x33773777, 0x02010001,
885 0x98fc, 0xffffffff, 0x33221100,
886 0x7030, 0x31000311, 0x00000011,
887 0x2f48, 0x33773777, 0x02010001,
888 0x6b28, 0x00000010, 0x00000012,
889 0x7728, 0x00000010, 0x00000012,
890 0x10328, 0x00000010, 0x00000012,
891 0x10f28, 0x00000010, 0x00000012,
892 0x11b28, 0x00000010, 0x00000012,
893 0x12728, 0x00000010, 0x00000012,
894 0x240c, 0x000007ff, 0x00000380,
895 0x8a14, 0xf000001f, 0x00000001,
896 0x8b24, 0x3fff3fff, 0x00ff0fff,
897 0x8b10, 0x0000ff0f, 0x00000000,
898 0x28a4c, 0x07ffffff, 0x06000000,
899 0x10c, 0x00000001, 0x00010003,
900 0xa02c, 0xffffffff, 0x0000009b,
901 0x913c, 0x0000000f, 0x0100000a,
902 0x8d00, 0xffff7f7f, 0x100e4848,
903 0x8d04, 0x00ffffff, 0x00164745,
904 0x8c00, 0xfffc0003, 0xe4000003,
905 0x8c04, 0xf8ff00ff, 0x40600060,
906 0x8c08, 0x00ff00ff, 0x001c001c,
907 0x8cf0, 0x1fff1fff, 0x08e00410,
908 0x8c20, 0x0fff0fff, 0x00800080,
909 0x8c24, 0x0fff0fff, 0x00800080,
910 0x8c18, 0xffffffff, 0x20202078,
911 0x8c1c, 0x0000ffff, 0x00001010,
912 0x28350, 0x00000f01, 0x00000000,
913 0x9508, 0x3700001f, 0x00000002,
914 0x960c, 0xffffffff, 0x54763210,
915 0x88c4, 0x001f3ae3, 0x000000c2,
916 0x88d4, 0x0000001f, 0x00000010,
917 0x8974, 0xffffffff, 0x00000000
918};
919
920static void evergreen_init_golden_registers(struct radeon_device *rdev)
921{
922 switch (rdev->family) {
923 case CHIP_CYPRESS:
924 case CHIP_HEMLOCK:
925 radeon_program_register_sequence(rdev,
926 evergreen_golden_registers,
927 (const u32)ARRAY_SIZE(evergreen_golden_registers));
928 radeon_program_register_sequence(rdev,
929 evergreen_golden_registers2,
930 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
931 radeon_program_register_sequence(rdev,
932 cypress_mgcg_init,
933 (const u32)ARRAY_SIZE(cypress_mgcg_init));
934 break;
935 case CHIP_JUNIPER:
936 radeon_program_register_sequence(rdev,
937 evergreen_golden_registers,
938 (const u32)ARRAY_SIZE(evergreen_golden_registers));
939 radeon_program_register_sequence(rdev,
940 evergreen_golden_registers2,
941 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
942 radeon_program_register_sequence(rdev,
943 juniper_mgcg_init,
944 (const u32)ARRAY_SIZE(juniper_mgcg_init));
945 break;
946 case CHIP_REDWOOD:
947 radeon_program_register_sequence(rdev,
948 evergreen_golden_registers,
949 (const u32)ARRAY_SIZE(evergreen_golden_registers));
950 radeon_program_register_sequence(rdev,
951 evergreen_golden_registers2,
952 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
953 radeon_program_register_sequence(rdev,
954 redwood_mgcg_init,
955 (const u32)ARRAY_SIZE(redwood_mgcg_init));
956 break;
957 case CHIP_CEDAR:
958 radeon_program_register_sequence(rdev,
959 cedar_golden_registers,
960 (const u32)ARRAY_SIZE(cedar_golden_registers));
961 radeon_program_register_sequence(rdev,
962 evergreen_golden_registers2,
963 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
964 radeon_program_register_sequence(rdev,
965 cedar_mgcg_init,
966 (const u32)ARRAY_SIZE(cedar_mgcg_init));
967 break;
968 case CHIP_PALM:
969 radeon_program_register_sequence(rdev,
970 wrestler_golden_registers,
971 (const u32)ARRAY_SIZE(wrestler_golden_registers));
972 break;
973 case CHIP_SUMO:
974 radeon_program_register_sequence(rdev,
975 supersumo_golden_registers,
976 (const u32)ARRAY_SIZE(supersumo_golden_registers));
977 break;
978 case CHIP_SUMO2:
979 radeon_program_register_sequence(rdev,
980 supersumo_golden_registers,
981 (const u32)ARRAY_SIZE(supersumo_golden_registers));
982 radeon_program_register_sequence(rdev,
983 sumo_golden_registers,
984 (const u32)ARRAY_SIZE(sumo_golden_registers));
985 break;
986 case CHIP_BARTS:
987 radeon_program_register_sequence(rdev,
988 barts_golden_registers,
989 (const u32)ARRAY_SIZE(barts_golden_registers));
990 break;
991 case CHIP_TURKS:
992 radeon_program_register_sequence(rdev,
993 turks_golden_registers,
994 (const u32)ARRAY_SIZE(turks_golden_registers));
995 break;
996 case CHIP_CAICOS:
997 radeon_program_register_sequence(rdev,
998 caicos_golden_registers,
999 (const u32)ARRAY_SIZE(caicos_golden_registers));
1000 break;
1001 default:
1002 break;
1003 }
1004}
1005
Jerome Glisse285484e2011-12-16 17:03:42 -05001006void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1007 unsigned *bankh, unsigned *mtaspect,
1008 unsigned *tile_split)
1009{
1010 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1011 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1012 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1013 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1014 switch (*bankw) {
1015 default:
1016 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1017 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1018 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1019 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1020 }
1021 switch (*bankh) {
1022 default:
1023 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1024 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1025 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1026 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1027 }
1028 switch (*mtaspect) {
1029 default:
1030 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1031 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1032 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1033 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1034 }
1035}
1036
Alex Deucher23d33ba2013-04-08 12:41:32 +02001037static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1038 u32 cntl_reg, u32 status_reg)
1039{
1040 int r, i;
1041 struct atom_clock_dividers dividers;
1042
1043 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1044 clock, false, &dividers);
1045 if (r)
1046 return r;
1047
1048 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1049
1050 for (i = 0; i < 100; i++) {
1051 if (RREG32(status_reg) & DCLK_STATUS)
1052 break;
1053 mdelay(10);
1054 }
1055 if (i == 100)
1056 return -ETIMEDOUT;
1057
1058 return 0;
1059}
1060
1061int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1062{
1063 int r = 0;
1064 u32 cg_scratch = RREG32(CG_SCRATCH1);
1065
1066 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1067 if (r)
1068 goto done;
1069 cg_scratch &= 0xffff0000;
1070 cg_scratch |= vclk / 100; /* Mhz */
1071
1072 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1073 if (r)
1074 goto done;
1075 cg_scratch &= 0x0000ffff;
1076 cg_scratch |= (dclk / 100) << 16; /* Mhz */
1077
1078done:
1079 WREG32(CG_SCRATCH1, cg_scratch);
1080
1081 return r;
1082}
1083
/**
 * evergreen_set_uvd_clocks - program the UVD PLL (UPLL) for vclk/dclk
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 = park the PLL in bypass/sleep)
 * @dclk: requested UVD decode clock (0 = park the PLL in bypass/sleep)
 *
 * Routes VCLK/DCLK to the bypass clock, walks the UPLL through its
 * reset/divider/settle sequence, then switches the clocks back onto the
 * PLL outputs.  The statement order below follows the required hardware
 * programming sequence and must not be rearranged.
 * Returns 0 on success or a negative error code from the divider
 * calculation / PLL control request.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute feedback and post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	/* ask the SMC to take over the PLL change */
	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects a VCO range based on the feedback divider */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1171}
1172
Alex Deucherd054ac12011-09-01 17:46:15 +00001173void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1174{
1175 u16 ctl, v;
Jiang Liu32195ae2012-07-24 17:20:30 +08001176 int err;
Alex Deucherd054ac12011-09-01 17:46:15 +00001177
Jiang Liu32195ae2012-07-24 17:20:30 +08001178 err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
Alex Deucherd054ac12011-09-01 17:46:15 +00001179 if (err)
1180 return;
1181
1182 v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1183
1184 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1185 * to avoid hangs or perfomance issues
1186 */
1187 if ((v == 0) || (v == 6) || (v == 7)) {
1188 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1189 ctl |= (2 << 12);
Jiang Liu32195ae2012-07-24 17:20:30 +08001190 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
Alex Deucherd054ac12011-09-01 17:46:15 +00001191 }
1192}
1193
Alex Deucher10257a62013-04-09 18:49:59 -04001194static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1195{
1196 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1197 return true;
1198 else
1199 return false;
1200}
1201
1202static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1203{
1204 u32 pos1, pos2;
1205
1206 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1207 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1208
1209 if (pos1 != pos2)
1210 return true;
1211 else
1212 return false;
1213}
1214
Alex Deucher377edc82012-07-17 14:02:42 -04001215/**
1216 * dce4_wait_for_vblank - vblank wait asic callback.
1217 *
1218 * @rdev: radeon_device pointer
1219 * @crtc: crtc to wait for vblank on
1220 *
1221 * Wait for vblank on the requested crtc (evergreen+).
1222 */
Alex Deucher3ae19b72012-02-23 17:53:37 -05001223void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1224{
Alex Deucher10257a62013-04-09 18:49:59 -04001225 unsigned i = 0;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001226
Alex Deucher4a159032012-08-15 17:13:53 -04001227 if (crtc >= rdev->num_crtc)
1228 return;
1229
Alex Deucher10257a62013-04-09 18:49:59 -04001230 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1231 return;
1232
1233 /* depending on when we hit vblank, we may be close to active; if so,
1234 * wait for another frame.
1235 */
1236 while (dce4_is_in_vblank(rdev, crtc)) {
1237 if (i++ % 100 == 0) {
1238 if (!dce4_is_counter_moving(rdev, crtc))
Alex Deucher3ae19b72012-02-23 17:53:37 -05001239 break;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001240 }
Alex Deucher10257a62013-04-09 18:49:59 -04001241 }
1242
1243 while (!dce4_is_in_vblank(rdev, crtc)) {
1244 if (i++ % 100 == 0) {
1245 if (!dce4_is_counter_moving(rdev, crtc))
Alex Deucher3ae19b72012-02-23 17:53:37 -05001246 break;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001247 }
1248 }
1249}
1250
/**
 * evergreen_pre_page_flip - pre-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to prepare for pageflip on
 *
 * Pre-pageflip callback (evergreen+).
 * Enables the pageflip irq (vblank irq) so the flip can be completed
 * from the interrupt handler.
 */
void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	/* enable the pflip int */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}
1265
/**
 * evergreen_post_page_flip - post-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to cleanup pageflip on
 *
 * Post-pageflip callback (evergreen+).
 * Disables the pageflip irq (vblank irq) again once the flip is done.
 */
void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}
1280
/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to flip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Does the actual pageflip (evergreen+).
 * During vblank we take the crtc lock and wait for the update_pending
 * bit to go high, when it does, we release the lock, and allow the
 * double buffered update to take place.
 * Returns the current update pending status.
 */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock so the new addresses latch atomically */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses (both secondary and primary surfaces) */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high (bounded by usec_timeout). */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	/* NOTE(review): this message is printed even if the wait above timed out */
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}
1330
Alex Deucher21a81222010-07-02 12:58:16 -04001331/* get temperature in millidegrees */
Alex Deucher20d391d2011-02-01 16:12:34 -05001332int evergreen_get_temp(struct radeon_device *rdev)
Alex Deucher21a81222010-07-02 12:58:16 -04001333{
Alex Deucher1c88d742011-06-14 19:15:53 +00001334 u32 temp, toffset;
1335 int actual_temp = 0;
Alex Deucher21a81222010-07-02 12:58:16 -04001336
Alex Deucher67b3f822011-05-25 18:45:37 -04001337 if (rdev->family == CHIP_JUNIPER) {
1338 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1339 TOFFSET_SHIFT;
1340 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1341 TS0_ADC_DOUT_SHIFT;
Alex Deucher21a81222010-07-02 12:58:16 -04001342
Alex Deucher67b3f822011-05-25 18:45:37 -04001343 if (toffset & 0x100)
1344 actual_temp = temp / 2 - (0x200 - toffset);
1345 else
1346 actual_temp = temp / 2 + toffset;
1347
1348 actual_temp = actual_temp * 1000;
1349
1350 } else {
1351 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1352 ASIC_T_SHIFT;
1353
1354 if (temp & 0x400)
1355 actual_temp = -256;
1356 else if (temp & 0x200)
1357 actual_temp = 255;
1358 else if (temp & 0x100) {
1359 actual_temp = temp & 0x1ff;
1360 actual_temp |= ~0x1ff;
1361 } else
1362 actual_temp = temp & 0xff;
1363
1364 actual_temp = (actual_temp * 1000) / 2;
1365 }
1366
1367 return actual_temp;
Alex Deucher21a81222010-07-02 12:58:16 -04001368}
1369
Alex Deucher20d391d2011-02-01 16:12:34 -05001370int sumo_get_temp(struct radeon_device *rdev)
Alex Deuchere33df252010-11-22 17:56:32 -05001371{
1372 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
Alex Deucher20d391d2011-02-01 16:12:34 -05001373 int actual_temp = temp - 49;
Alex Deuchere33df252010-11-22 17:56:32 -05001374
1375 return actual_temp * 1000;
1376}
1377
Alex Deucher377edc82012-07-17 14:02:42 -04001378/**
1379 * sumo_pm_init_profile - Initialize power profiles callback.
1380 *
1381 * @rdev: radeon_device pointer
1382 *
1383 * Initialize the power states used in profile mode
1384 * (sumo, trinity, SI).
1385 * Used for profile mode only.
1386 */
Alex Deuchera4c9e2e2011-11-04 10:09:41 -04001387void sumo_pm_init_profile(struct radeon_device *rdev)
1388{
1389 int idx;
1390
1391 /* default */
1392 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1393 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1394 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1395 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1396
1397 /* low,mid sh/mh */
1398 if (rdev->flags & RADEON_IS_MOBILITY)
1399 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1400 else
1401 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1402
1403 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1404 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1405 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1406 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1407
1408 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1409 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1410 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1411 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1412
1413 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1414 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1415 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1416 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1417
1418 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1419 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1420 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1421 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1422
1423 /* high sh/mh */
1424 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1425 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1426 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1427 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1428 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1429 rdev->pm.power_state[idx].num_clock_modes - 1;
1430
1431 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1432 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1433 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1434 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1435 rdev->pm.power_state[idx].num_clock_modes - 1;
1436}
1437
Alex Deucher377edc82012-07-17 14:02:42 -04001438/**
Alex Deucher27810fb2012-10-01 19:25:11 -04001439 * btc_pm_init_profile - Initialize power profiles callback.
1440 *
1441 * @rdev: radeon_device pointer
1442 *
1443 * Initialize the power states used in profile mode
1444 * (BTC, cayman).
1445 * Used for profile mode only.
1446 */
1447void btc_pm_init_profile(struct radeon_device *rdev)
1448{
1449 int idx;
1450
1451 /* default */
1452 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1453 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1454 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1455 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1456 /* starting with BTC, there is one state that is used for both
1457 * MH and SH. Difference is that we always use the high clock index for
1458 * mclk.
1459 */
1460 if (rdev->flags & RADEON_IS_MOBILITY)
1461 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1462 else
1463 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1464 /* low sh */
1465 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1466 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1467 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1468 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1469 /* mid sh */
1470 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1471 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1472 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1473 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1474 /* high sh */
1475 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1476 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1477 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1478 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1479 /* low mh */
1480 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1481 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1482 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1483 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1484 /* mid mh */
1485 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1486 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1487 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1488 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1489 /* high mh */
1490 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1491 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1492 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1493 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1494}
1495
/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).  Programs VDDC and VDDCI for the
 * requested power state, skipping values that are really flags and
 * values already current.
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only program VDDC when it actually changes */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.  So for the MH profiles, take vddci from the
		 * high-MH on-clock-mode instead of the requested clock mode.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* only program VDDCI when it actually changes */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1543
Alex Deucher377edc82012-07-17 14:02:42 -04001544/**
1545 * evergreen_pm_prepare - pre-power state change callback.
1546 *
1547 * @rdev: radeon_device pointer
1548 *
1549 * Prepare for a power state change (evergreen+).
1550 */
Alex Deucher49e02b72010-04-23 17:57:27 -04001551void evergreen_pm_prepare(struct radeon_device *rdev)
1552{
1553 struct drm_device *ddev = rdev->ddev;
1554 struct drm_crtc *crtc;
1555 struct radeon_crtc *radeon_crtc;
1556 u32 tmp;
1557
1558 /* disable any active CRTCs */
1559 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1560 radeon_crtc = to_radeon_crtc(crtc);
1561 if (radeon_crtc->enabled) {
1562 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1563 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1564 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1565 }
1566 }
1567}
1568
Alex Deucher377edc82012-07-17 14:02:42 -04001569/**
1570 * evergreen_pm_finish - post-power state change callback.
1571 *
1572 * @rdev: radeon_device pointer
1573 *
1574 * Clean up after a power state change (evergreen+).
1575 */
Alex Deucher49e02b72010-04-23 17:57:27 -04001576void evergreen_pm_finish(struct radeon_device *rdev)
1577{
1578 struct drm_device *ddev = rdev->ddev;
1579 struct drm_crtc *crtc;
1580 struct radeon_crtc *radeon_crtc;
1581 u32 tmp;
1582
1583 /* enable any active CRTCs */
1584 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1585 radeon_crtc = to_radeon_crtc(crtc);
1586 if (radeon_crtc->enabled) {
1587 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1588 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1589 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1590 }
1591 }
1592}
1593
Alex Deucher377edc82012-07-17 14:02:42 -04001594/**
1595 * evergreen_hpd_sense - hpd sense callback.
1596 *
1597 * @rdev: radeon_device pointer
1598 * @hpd: hpd (hotplug detect) pin
1599 *
1600 * Checks if a digital monitor is connected (evergreen+).
1601 * Returns true if connected, false if not connected.
1602 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001603bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1604{
1605 bool connected = false;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001606
1607 switch (hpd) {
1608 case RADEON_HPD_1:
1609 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1610 connected = true;
1611 break;
1612 case RADEON_HPD_2:
1613 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1614 connected = true;
1615 break;
1616 case RADEON_HPD_3:
1617 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1618 connected = true;
1619 break;
1620 case RADEON_HPD_4:
1621 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1622 connected = true;
1623 break;
1624 case RADEON_HPD_5:
1625 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1626 connected = true;
1627 break;
1628 case RADEON_HPD_6:
1629 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1630 connected = true;
1631 break;
1632 default:
1633 break;
1634 }
1635
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001636 return connected;
1637}
1638
Alex Deucher377edc82012-07-17 14:02:42 -04001639/**
1640 * evergreen_hpd_set_polarity - hpd set polarity callback.
1641 *
1642 * @rdev: radeon_device pointer
1643 * @hpd: hpd (hotplug detect) pin
1644 *
1645 * Set the polarity of the hpd pin (evergreen+).
1646 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001647void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1648 enum radeon_hpd_id hpd)
1649{
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001650 u32 tmp;
1651 bool connected = evergreen_hpd_sense(rdev, hpd);
1652
1653 switch (hpd) {
1654 case RADEON_HPD_1:
1655 tmp = RREG32(DC_HPD1_INT_CONTROL);
1656 if (connected)
1657 tmp &= ~DC_HPDx_INT_POLARITY;
1658 else
1659 tmp |= DC_HPDx_INT_POLARITY;
1660 WREG32(DC_HPD1_INT_CONTROL, tmp);
1661 break;
1662 case RADEON_HPD_2:
1663 tmp = RREG32(DC_HPD2_INT_CONTROL);
1664 if (connected)
1665 tmp &= ~DC_HPDx_INT_POLARITY;
1666 else
1667 tmp |= DC_HPDx_INT_POLARITY;
1668 WREG32(DC_HPD2_INT_CONTROL, tmp);
1669 break;
1670 case RADEON_HPD_3:
1671 tmp = RREG32(DC_HPD3_INT_CONTROL);
1672 if (connected)
1673 tmp &= ~DC_HPDx_INT_POLARITY;
1674 else
1675 tmp |= DC_HPDx_INT_POLARITY;
1676 WREG32(DC_HPD3_INT_CONTROL, tmp);
1677 break;
1678 case RADEON_HPD_4:
1679 tmp = RREG32(DC_HPD4_INT_CONTROL);
1680 if (connected)
1681 tmp &= ~DC_HPDx_INT_POLARITY;
1682 else
1683 tmp |= DC_HPDx_INT_POLARITY;
1684 WREG32(DC_HPD4_INT_CONTROL, tmp);
1685 break;
1686 case RADEON_HPD_5:
1687 tmp = RREG32(DC_HPD5_INT_CONTROL);
1688 if (connected)
1689 tmp &= ~DC_HPDx_INT_POLARITY;
1690 else
1691 tmp |= DC_HPDx_INT_POLARITY;
1692 WREG32(DC_HPD5_INT_CONTROL, tmp);
1693 break;
1694 case RADEON_HPD_6:
1695 tmp = RREG32(DC_HPD6_INT_CONTROL);
1696 if (connected)
1697 tmp &= ~DC_HPDx_INT_POLARITY;
1698 else
1699 tmp |= DC_HPDx_INT_POLARITY;
1700 WREG32(DC_HPD6_INT_CONTROL, tmp);
1701 break;
1702 default:
1703 break;
1704 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001705}
1706
Alex Deucher377edc82012-07-17 14:02:42 -04001707/**
1708 * evergreen_hpd_init - hpd setup callback.
1709 *
1710 * @rdev: radeon_device pointer
1711 *
1712 * Setup the hpd pins used by the card (evergreen+).
1713 * Enable the pin, set the polarity, and enable the hpd interrupts.
1714 */
void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned enabled = 0;	/* bitmask of hpd pins to hand to the irq layer */
	/* connection timer 0x9c4, RX int timer 0xfa, pin enable bit */
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS avoid breaking the
			 * aux dp channel on imac and help (but not completely fix)
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
			 * also avoid interrupt storms during dpms.
			 */
			continue;
		}
		/* program the control register matching this connector's hpd pin */
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			break;
		default:
			/* RADEON_HPD_NONE etc: connector has no hpd pin */
			break;
		}
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
		enabled |= 1 << radeon_connector->hpd.hpd;
	}
	/* enable the hpd interrupts for every pin programmed above */
	radeon_irq_kms_enable_hpd(rdev, enabled);
}
1762
/**
 * evergreen_hpd_fini - hpd tear down callback.
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the hpd pins used by the card (evergreen+).
 * Clears each connector's DC_HPDx_CONTROL register and disables
 * the hpd interrupts.
 */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disabled = 0;	/* bitmask of hpd pins to hand to the irq layer */

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		/* writing 0 disables the pin (mirror of evergreen_hpd_init) */
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			break;
		default:
			/* connector has no hpd pin */
			break;
		}
		disabled |= 1 << radeon_connector->hpd.hpd;
	}
	radeon_irq_kms_disable_hpd(rdev, disabled);
}
1805
Alex Deucherf9d9c362010-10-22 02:51:05 -04001806/* watermark setup */
1807
1808static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1809 struct radeon_crtc *radeon_crtc,
1810 struct drm_display_mode *mode,
1811 struct drm_display_mode *other_mode)
1812{
Alex Deucher12dfc842011-04-14 19:07:34 -04001813 u32 tmp;
Alex Deucherf9d9c362010-10-22 02:51:05 -04001814 /*
1815 * Line Buffer Setup
1816 * There are 3 line buffers, each one shared by 2 display controllers.
1817 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1818 * the display controllers. The paritioning is done via one of four
1819 * preset allocations specified in bits 2:0:
1820 * first display controller
1821 * 0 - first half of lb (3840 * 2)
1822 * 1 - first 3/4 of lb (5760 * 2)
Alex Deucher12dfc842011-04-14 19:07:34 -04001823 * 2 - whole lb (7680 * 2), other crtc must be disabled
Alex Deucherf9d9c362010-10-22 02:51:05 -04001824 * 3 - first 1/4 of lb (1920 * 2)
1825 * second display controller
1826 * 4 - second half of lb (3840 * 2)
1827 * 5 - second 3/4 of lb (5760 * 2)
Alex Deucher12dfc842011-04-14 19:07:34 -04001828 * 6 - whole lb (7680 * 2), other crtc must be disabled
Alex Deucherf9d9c362010-10-22 02:51:05 -04001829 * 7 - last 1/4 of lb (1920 * 2)
1830 */
Alex Deucher12dfc842011-04-14 19:07:34 -04001831 /* this can get tricky if we have two large displays on a paired group
1832 * of crtcs. Ideally for multiple large displays we'd assign them to
1833 * non-linked crtcs for maximum line buffer allocation.
1834 */
1835 if (radeon_crtc->base.enabled && mode) {
1836 if (other_mode)
Alex Deucherf9d9c362010-10-22 02:51:05 -04001837 tmp = 0; /* 1/2 */
Alex Deucher12dfc842011-04-14 19:07:34 -04001838 else
1839 tmp = 2; /* whole */
1840 } else
1841 tmp = 0;
Alex Deucherf9d9c362010-10-22 02:51:05 -04001842
1843 /* second controller of the pair uses second half of the lb */
1844 if (radeon_crtc->crtc_id % 2)
1845 tmp += 4;
1846 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1847
Alex Deucher12dfc842011-04-14 19:07:34 -04001848 if (radeon_crtc->base.enabled && mode) {
1849 switch (tmp) {
1850 case 0:
1851 case 4:
1852 default:
1853 if (ASIC_IS_DCE5(rdev))
1854 return 4096 * 2;
1855 else
1856 return 3840 * 2;
1857 case 1:
1858 case 5:
1859 if (ASIC_IS_DCE5(rdev))
1860 return 6144 * 2;
1861 else
1862 return 5760 * 2;
1863 case 2:
1864 case 6:
1865 if (ASIC_IS_DCE5(rdev))
1866 return 8192 * 2;
1867 else
1868 return 7680 * 2;
1869 case 3:
1870 case 7:
1871 if (ASIC_IS_DCE5(rdev))
1872 return 2048 * 2;
1873 else
1874 return 1920 * 2;
1875 }
Alex Deucherf9d9c362010-10-22 02:51:05 -04001876 }
Alex Deucher12dfc842011-04-14 19:07:34 -04001877
1878 /* controller not enabled, so no lb used */
1879 return 0;
Alex Deucherf9d9c362010-10-22 02:51:05 -04001880}
1881
Alex Deucherca7db222012-03-20 17:18:30 -04001882u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
Alex Deucherf9d9c362010-10-22 02:51:05 -04001883{
1884 u32 tmp = RREG32(MC_SHARED_CHMAP);
1885
1886 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1887 case 0:
1888 default:
1889 return 1;
1890 case 1:
1891 return 2;
1892 case 2:
1893 return 4;
1894 case 3:
1895 return 8;
1896 }
1897}
1898
/* evergreen_wm_params - inputs for the display watermark calculations.
 * Filled in by evergreen_program_watermarks() once per clock level
 * (high/low) and consumed by the evergreen_*_bandwidth/latency helpers.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk; /* bandwidth per dram data pin in kHz */
	u32 sclk; /* engine clock in kHz */
	u32 disp_clk; /* display clock in kHz */
	u32 src_width; /* viewport width */
	u32 active_time; /* active display time in ns */
	u32 blank_time; /* blank time in ns */
	bool interlaced; /* mode is interlaced */
	fixed20_12 vsc; /* vertical scale ratio */
	u32 num_heads; /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size; /* line buffer allocated to pipe */
	u32 vtaps; /* vertical scaler taps */
};
1914
1915static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1916{
1917 /* Calculate DRAM Bandwidth and the part allocated to display. */
1918 fixed20_12 dram_efficiency; /* 0.7 */
1919 fixed20_12 yclk, dram_channels, bandwidth;
1920 fixed20_12 a;
1921
1922 a.full = dfixed_const(1000);
1923 yclk.full = dfixed_const(wm->yclk);
1924 yclk.full = dfixed_div(yclk, a);
1925 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1926 a.full = dfixed_const(10);
1927 dram_efficiency.full = dfixed_const(7);
1928 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1929 bandwidth.full = dfixed_mul(dram_channels, yclk);
1930 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1931
1932 return dfixed_trunc(bandwidth);
1933}
1934
1935static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1936{
1937 /* Calculate DRAM Bandwidth and the part allocated to display. */
1938 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1939 fixed20_12 yclk, dram_channels, bandwidth;
1940 fixed20_12 a;
1941
1942 a.full = dfixed_const(1000);
1943 yclk.full = dfixed_const(wm->yclk);
1944 yclk.full = dfixed_div(yclk, a);
1945 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1946 a.full = dfixed_const(10);
1947 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1948 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1949 bandwidth.full = dfixed_mul(dram_channels, yclk);
1950 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1951
1952 return dfixed_trunc(bandwidth);
1953}
1954
1955static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1956{
1957 /* Calculate the display Data return Bandwidth */
1958 fixed20_12 return_efficiency; /* 0.8 */
1959 fixed20_12 sclk, bandwidth;
1960 fixed20_12 a;
1961
1962 a.full = dfixed_const(1000);
1963 sclk.full = dfixed_const(wm->sclk);
1964 sclk.full = dfixed_div(sclk, a);
1965 a.full = dfixed_const(10);
1966 return_efficiency.full = dfixed_const(8);
1967 return_efficiency.full = dfixed_div(return_efficiency, a);
1968 a.full = dfixed_const(32);
1969 bandwidth.full = dfixed_mul(a, sclk);
1970 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1971
1972 return dfixed_trunc(bandwidth);
1973}
1974
1975static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1976{
1977 /* Calculate the DMIF Request Bandwidth */
1978 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1979 fixed20_12 disp_clk, bandwidth;
1980 fixed20_12 a;
1981
1982 a.full = dfixed_const(1000);
1983 disp_clk.full = dfixed_const(wm->disp_clk);
1984 disp_clk.full = dfixed_div(disp_clk, a);
1985 a.full = dfixed_const(10);
1986 disp_clk_request_efficiency.full = dfixed_const(8);
1987 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1988 a.full = dfixed_const(32);
1989 bandwidth.full = dfixed_mul(a, disp_clk);
1990 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1991
1992 return dfixed_trunc(bandwidth);
1993}
1994
1995static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1996{
1997 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1998 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1999 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2000 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2001
2002 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2003}
2004
2005static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2006{
2007 /* Calculate the display mode Average Bandwidth
2008 * DisplayMode should contain the source and destination dimensions,
2009 * timing, etc.
2010 */
2011 fixed20_12 bpp;
2012 fixed20_12 line_time;
2013 fixed20_12 src_width;
2014 fixed20_12 bandwidth;
2015 fixed20_12 a;
2016
2017 a.full = dfixed_const(1000);
2018 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2019 line_time.full = dfixed_div(line_time, a);
2020 bpp.full = dfixed_const(wm->bytes_per_pixel);
2021 src_width.full = dfixed_const(wm->src_width);
2022 bandwidth.full = dfixed_mul(src_width, bpp);
2023 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2024 bandwidth.full = dfixed_div(bandwidth, line_time);
2025
2026 return dfixed_trunc(bandwidth);
2027}
2028
/* evergreen_latency_watermark - worst-case latency the line buffer must
 * hide for this head, in ns.  Combines memory controller latency, the
 * time other heads spend returning data, and display pipe latency; if
 * the line buffer cannot be refilled within the active display time,
 * the shortfall is added on top.
 * NOTE(review): divides by available_bandwidth and wm->disp_clk before
 * the num_heads==0 guard — presumably callers guarantee both are
 * non-zero; confirm against evergreen_program_watermarks().
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time for a worst-case 512-byte * 8 chunk to come back */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling or many vertical taps need more source lines
	 * fetched per output line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* lb fill rate: this head's share of the available bandwidth,
	 * capped by what the display clock can consume */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the source lines of one output line at that rate */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* refill slower than scan-out adds directly to the latency */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2081
2082static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2083{
2084 if (evergreen_average_bandwidth(wm) <=
2085 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2086 return true;
2087 else
2088 return false;
2089};
2090
2091static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2092{
2093 if (evergreen_average_bandwidth(wm) <=
2094 (evergreen_available_bandwidth(wm) / wm->num_heads))
2095 return true;
2096 else
2097 return false;
2098};
2099
2100static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2101{
2102 u32 lb_partitions = wm->lb_size / wm->src_width;
2103 u32 line_time = wm->active_time + wm->blank_time;
2104 u32 latency_tolerant_lines;
2105 u32 latency_hiding;
2106 fixed20_12 a;
2107
2108 a.full = dfixed_const(1);
2109 if (wm->vsc.full > a.full)
2110 latency_tolerant_lines = 1;
2111 else {
2112 if (lb_partitions <= (wm->vtaps + 1))
2113 latency_tolerant_lines = 1;
2114 else
2115 latency_tolerant_lines = 2;
2116 }
2117
2118 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2119
2120 if (evergreen_latency_watermark(wm) <= latency_hiding)
2121 return true;
2122 else
2123 return false;
2124}
2125
/* evergreen_program_watermarks - program the display watermarks for one crtc.
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @lb_size: line buffer pixels allocated to this crtc
 * @num_heads: total number of active crtcs
 *
 * Builds two watermark parameter sets — wm A for the high (current/dpm
 * high) clocks and wm B for the low clocks — computes the latency
 * watermark for each, programs both into the pipe's arbitration
 * registers, and writes the priority marks.  Also saves the results on
 * the crtc for later use by DPM.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;	/* arb regs are 16 dwords per pipe */
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;	/* ns per pixel */
		/* line time in ns, clamped to the 16-bit register field */
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A = watermark_a * pixel clock * hsc / 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B, same formula with watermark_b */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2280
/**
 * evergreen_bandwidth_update - update display watermarks callback.
 *
 * @rdev: radeon_device pointer
 *
 * Update the display watermarks based on the requested mode(s)
 * (evergreen+).  Walks the crtcs in line-buffer-sharing pairs
 * (i, i+1), splits the line buffer between each pair, then programs
 * the watermarks for both members.
 */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	radeon_update_display_priority(rdev);

	/* count the active heads first; every watermark depends on it */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	/* crtcs i and i+1 share a line buffer, so handle them together */
	for (i = 0; i < rdev->num_crtc; i += 2) {
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}
2311
Alex Deucher377edc82012-07-17 14:02:42 -04002312/**
2313 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2314 *
2315 * @rdev: radeon_device pointer
2316 *
2317 * Wait for the MC (memory controller) to be idle.
2318 * (evergreen+).
2319 * Returns 0 if the MC is idle, -1 if not.
2320 */
Alex Deucherb9952a82011-03-02 20:07:33 -05002321int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002322{
2323 unsigned i;
2324 u32 tmp;
2325
2326 for (i = 0; i < rdev->usec_timeout; i++) {
2327 /* read MC_STATUS */
2328 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2329 if (!tmp)
2330 return 0;
2331 udelay(1);
2332 }
2333 return -1;
2334}
2335
2336/*
2337 * GART
2338 */
/* evergreen_pcie_gart_tlb_flush - flush the VM context 0 TLB.
 *
 * Writes the HDP coherency flush register, requests a context-0 TLB
 * flush, then polls the response register until the hardware
 * acknowledges (any non-zero response) or the usec timeout expires.
 * A response value of 2 is reported as a failed flush.
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush HDP so pending page-table writes are visible first */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}
2361
/* evergreen_pcie_gart_enable - bring up the PCIE GART.
 *
 * Pins the page table in VRAM, restores existing GART entries,
 * programs the VM L2 cache and L1 TLB controls, sets the context-0
 * page table range/base and fault handling, then flushes the TLB.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* re-write any entries populated before the table was pinned */
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion parts use the FUS_ register block */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these discrete families have the 4th MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 covers the whole GTT aperture (addresses are 4k pages) */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faults are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2420
/* evergreen_pcie_gart_disable - turn off the PCIE GART.
 *
 * Disables both VM contexts, reprograms the L2/L1 controls to their
 * non-translating configuration, and unpins the page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2445
/* evergreen_pcie_gart_fini - final GART teardown: disable translation,
 * free the page table VRAM, and release the GART bookkeeping.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2452
2453
/* evergreen_agp_enable - configure the VM hardware for AGP operation.
 *
 * Programs the same L2/L1 TLB configuration as the GART path but
 * leaves both VM contexts disabled, so no page-table translation
 * takes place.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no translation: both contexts stay disabled */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2479
/* evergreen_mc_stop - stop display access to the memory controller.
 *
 * @rdev: radeon_device pointer
 * @save: structure that records display state for evergreen_mc_resume()
 *
 * Disables VGA rendering, blanks/disables every active crtc while
 * waiting out a frame, blacks out the MC and blocks CPU FB access so
 * the MC can be reprogrammed safely, then locks the double-buffered
 * display registers.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE: deliberately marks the crtc disabled after the EFI
			 * hack above, which also makes the lock loop below a no-op
			 * for this crtc */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2565
Alex Deucherb9952a82011-03-02 20:07:33 -05002566void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002567{
Alex Deucher62444b72012-08-15 17:18:42 -04002568 u32 tmp, frame_count;
2569 int i, j;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002570
Alex Deucher62444b72012-08-15 17:18:42 -04002571 /* update crtc base addresses */
2572 for (i = 0; i < rdev->num_crtc; i++) {
2573 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
Alex Deucher18007402010-11-22 17:56:28 -05002574 upper_32_bits(rdev->mc.vram_start));
Alex Deucher62444b72012-08-15 17:18:42 -04002575 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
Alex Deucher18007402010-11-22 17:56:28 -05002576 upper_32_bits(rdev->mc.vram_start));
Alex Deucher62444b72012-08-15 17:18:42 -04002577 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
Alex Deucher18007402010-11-22 17:56:28 -05002578 (u32)rdev->mc.vram_start);
Alex Deucher62444b72012-08-15 17:18:42 -04002579 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
Alex Deucher18007402010-11-22 17:56:28 -05002580 (u32)rdev->mc.vram_start);
Alex Deucherb7eff392011-07-08 11:44:56 -04002581 }
Alex Deucher51535502012-08-30 14:34:30 -04002582
2583 if (!ASIC_IS_NODCE(rdev)) {
2584 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2585 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2586 }
Alex Deucher62444b72012-08-15 17:18:42 -04002587
Alex Deucher968c0162013-04-10 09:58:42 -04002588 /* unlock regs and wait for update */
2589 for (i = 0; i < rdev->num_crtc; i++) {
2590 if (save->crtc_enabled[i]) {
2591 tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2592 if ((tmp & 0x3) != 0) {
2593 tmp &= ~0x3;
2594 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2595 }
2596 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2597 if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2598 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2599 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2600 }
2601 tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2602 if (tmp & 1) {
2603 tmp &= ~1;
2604 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2605 }
2606 for (j = 0; j < rdev->usec_timeout; j++) {
2607 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2608 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2609 break;
2610 udelay(1);
2611 }
2612 }
2613 }
2614
Alex Deucher62444b72012-08-15 17:18:42 -04002615 /* unblackout the MC */
2616 tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2617 tmp &= ~BLACKOUT_MODE_MASK;
2618 WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2619 /* allow CPU access */
2620 WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2621
2622 for (i = 0; i < rdev->num_crtc; i++) {
Alex Deucher695ddeb2012-11-05 16:34:58 +00002623 if (save->crtc_enabled[i]) {
Alex Deucher62444b72012-08-15 17:18:42 -04002624 if (ASIC_IS_DCE6(rdev)) {
2625 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2626 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
Christopher Staitebb5888202013-01-26 11:10:58 -05002627 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
Alex Deucher62444b72012-08-15 17:18:42 -04002628 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
Christopher Staitebb5888202013-01-26 11:10:58 -05002629 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
Alex Deucher62444b72012-08-15 17:18:42 -04002630 } else {
2631 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2632 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
Christopher Staitebb5888202013-01-26 11:10:58 -05002633 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
Alex Deucher62444b72012-08-15 17:18:42 -04002634 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
Christopher Staitebb5888202013-01-26 11:10:58 -05002635 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
Alex Deucher62444b72012-08-15 17:18:42 -04002636 }
2637 /* wait for the next frame */
2638 frame_count = radeon_get_vblank_counter(rdev, i);
2639 for (j = 0; j < rdev->usec_timeout; j++) {
2640 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2641 break;
2642 udelay(1);
2643 }
2644 }
2645 }
Alex Deucher51535502012-08-30 14:34:30 -04002646 if (!ASIC_IS_NODCE(rdev)) {
2647 /* Unlock vga access */
2648 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2649 mdelay(1);
2650 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2651 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002652}
2653
Alex Deucher755d8192011-03-02 20:07:34 -05002654void evergreen_mc_program(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002655{
2656 struct evergreen_mc_save save;
2657 u32 tmp;
2658 int i, j;
2659
2660 /* Initialize HDP */
2661 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2662 WREG32((0x2c14 + j), 0x00000000);
2663 WREG32((0x2c18 + j), 0x00000000);
2664 WREG32((0x2c1c + j), 0x00000000);
2665 WREG32((0x2c20 + j), 0x00000000);
2666 WREG32((0x2c24 + j), 0x00000000);
2667 }
2668 WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2669
2670 evergreen_mc_stop(rdev, &save);
2671 if (evergreen_mc_wait_for_idle(rdev)) {
2672 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2673 }
2674 /* Lockout access through VGA aperture*/
2675 WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2676 /* Update configuration */
2677 if (rdev->flags & RADEON_IS_AGP) {
2678 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2679 /* VRAM before AGP */
2680 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2681 rdev->mc.vram_start >> 12);
2682 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2683 rdev->mc.gtt_end >> 12);
2684 } else {
2685 /* VRAM after AGP */
2686 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2687 rdev->mc.gtt_start >> 12);
2688 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2689 rdev->mc.vram_end >> 12);
2690 }
2691 } else {
2692 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2693 rdev->mc.vram_start >> 12);
2694 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2695 rdev->mc.vram_end >> 12);
2696 }
Alex Deucher3b9832f2011-11-10 08:59:39 -05002697 WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
Alex Deucher05b3ef62012-03-20 17:18:37 -04002698 /* llano/ontario only */
2699 if ((rdev->family == CHIP_PALM) ||
2700 (rdev->family == CHIP_SUMO) ||
2701 (rdev->family == CHIP_SUMO2)) {
Alex Deucherb4183e32010-12-15 11:04:10 -05002702 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2703 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2704 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2705 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2706 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002707 tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2708 tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2709 WREG32(MC_VM_FB_LOCATION, tmp);
2710 WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
Alex Deucherc46cb4d2011-01-06 19:12:37 -05002711 WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
Jerome Glisse46fcd2b2010-06-03 19:34:48 +02002712 WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002713 if (rdev->flags & RADEON_IS_AGP) {
2714 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2715 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2716 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
2717 } else {
2718 WREG32(MC_VM_AGP_BASE, 0);
2719 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2720 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2721 }
2722 if (evergreen_mc_wait_for_idle(rdev)) {
2723 dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
2724 }
2725 evergreen_mc_resume(rdev, &save);
2726 /* we need to own VRAM, so turn off the VGA renderer here
2727 * to stop it overwriting our objects */
2728 rv515_vga_render_disable(rdev);
2729}
2730
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002731/*
2732 * CP.
2733 */
Alex Deucher12920592011-02-02 12:37:40 -05002734void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2735{
Christian König876dc9f2012-05-08 14:24:01 +02002736 struct radeon_ring *ring = &rdev->ring[ib->ring];
Alex Deucher89d35802012-07-17 14:02:31 -04002737 u32 next_rptr;
Christian König7b1f2482011-09-23 15:11:23 +02002738
Alex Deucher12920592011-02-02 12:37:40 -05002739 /* set to DX10/11 mode */
Christian Könige32eb502011-10-23 12:56:27 +02002740 radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2741 radeon_ring_write(ring, 1);
Christian König45df6802012-07-06 16:22:55 +02002742
2743 if (ring->rptr_save_reg) {
Alex Deucher89d35802012-07-17 14:02:31 -04002744 next_rptr = ring->wptr + 3 + 4;
Christian König45df6802012-07-06 16:22:55 +02002745 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2746 radeon_ring_write(ring, ((ring->rptr_save_reg -
2747 PACKET3_SET_CONFIG_REG_START) >> 2));
2748 radeon_ring_write(ring, next_rptr);
Alex Deucher89d35802012-07-17 14:02:31 -04002749 } else if (rdev->wb.enabled) {
2750 next_rptr = ring->wptr + 5 + 4;
2751 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2752 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2753 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2754 radeon_ring_write(ring, next_rptr);
2755 radeon_ring_write(ring, 0);
Christian König45df6802012-07-06 16:22:55 +02002756 }
2757
Christian Könige32eb502011-10-23 12:56:27 +02002758 radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2759 radeon_ring_write(ring,
Alex Deucher0f234f5f2011-02-13 19:06:33 -05002760#ifdef __BIG_ENDIAN
2761 (2 << 0) |
2762#endif
2763 (ib->gpu_addr & 0xFFFFFFFC));
Christian Könige32eb502011-10-23 12:56:27 +02002764 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2765 radeon_ring_write(ring, ib->length_dw);
Alex Deucher12920592011-02-02 12:37:40 -05002766}
2767
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002768
/**
 * evergreen_cp_load_microcode - upload PFP and ME microcode to the CP
 * @rdev: radeon device
 *
 * Stops the CP, then streams the big-endian firmware images fetched
 * earlier (rdev->pfp_fw / rdev->me_fw) into the PFP and ME ucode RAMs
 * one dword at a time, resetting the write/read addresses afterwards.
 *
 * Returns 0 on success, -EINVAL if either firmware image is missing.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	/* keep the ring disabled while ucode loads; RB_NO_UPDATE + small ring */
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP (prefetch parser) microcode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME (micro engine) microcode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* rewind the ucode RAM pointers */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2800
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002801static int evergreen_cp_start(struct radeon_device *rdev)
2802{
Christian Könige32eb502011-10-23 12:56:27 +02002803 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
Alex Deucher2281a372010-10-21 13:31:38 -04002804 int r, i;
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002805 uint32_t cp_me;
2806
Christian Könige32eb502011-10-23 12:56:27 +02002807 r = radeon_ring_lock(rdev, ring, 7);
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002808 if (r) {
2809 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2810 return r;
2811 }
Christian Könige32eb502011-10-23 12:56:27 +02002812 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2813 radeon_ring_write(ring, 0x1);
2814 radeon_ring_write(ring, 0x0);
2815 radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2816 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2817 radeon_ring_write(ring, 0);
2818 radeon_ring_write(ring, 0);
2819 radeon_ring_unlock_commit(rdev, ring);
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002820
2821 cp_me = 0xff;
2822 WREG32(CP_ME_CNTL, cp_me);
2823
Christian Könige32eb502011-10-23 12:56:27 +02002824 r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002825 if (r) {
2826 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2827 return r;
2828 }
Alex Deucher2281a372010-10-21 13:31:38 -04002829
2830 /* setup clear context state */
Christian Könige32eb502011-10-23 12:56:27 +02002831 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2832 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
Alex Deucher2281a372010-10-21 13:31:38 -04002833
2834 for (i = 0; i < evergreen_default_size; i++)
Christian Könige32eb502011-10-23 12:56:27 +02002835 radeon_ring_write(ring, evergreen_default_state[i]);
Alex Deucher2281a372010-10-21 13:31:38 -04002836
Christian Könige32eb502011-10-23 12:56:27 +02002837 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2838 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
Alex Deucher2281a372010-10-21 13:31:38 -04002839
2840 /* set clear context state */
Christian Könige32eb502011-10-23 12:56:27 +02002841 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2842 radeon_ring_write(ring, 0);
Alex Deucher2281a372010-10-21 13:31:38 -04002843
2844 /* SQ_VTX_BASE_VTX_LOC */
Christian Könige32eb502011-10-23 12:56:27 +02002845 radeon_ring_write(ring, 0xc0026f00);
2846 radeon_ring_write(ring, 0x00000000);
2847 radeon_ring_write(ring, 0x00000000);
2848 radeon_ring_write(ring, 0x00000000);
Alex Deucher2281a372010-10-21 13:31:38 -04002849
2850 /* Clear consts */
Christian Könige32eb502011-10-23 12:56:27 +02002851 radeon_ring_write(ring, 0xc0036f00);
2852 radeon_ring_write(ring, 0x00000bc4);
2853 radeon_ring_write(ring, 0xffffffff);
2854 radeon_ring_write(ring, 0xffffffff);
2855 radeon_ring_write(ring, 0xffffffff);
Alex Deucher2281a372010-10-21 13:31:38 -04002856
Christian Könige32eb502011-10-23 12:56:27 +02002857 radeon_ring_write(ring, 0xc0026900);
2858 radeon_ring_write(ring, 0x00000316);
2859 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2860 radeon_ring_write(ring, 0x00000010); /* */
Alex Deucher18ff84d2011-02-02 12:37:41 -05002861
Christian Könige32eb502011-10-23 12:56:27 +02002862 radeon_ring_unlock_commit(rdev, ring);
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002863
2864 return 0;
2865}
2866
Lauri Kasanen1109ca02012-08-31 13:43:50 -04002867static int evergreen_cp_resume(struct radeon_device *rdev)
Alex Deucherfe251e22010-03-24 13:36:43 -04002868{
Christian Könige32eb502011-10-23 12:56:27 +02002869 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
Alex Deucherfe251e22010-03-24 13:36:43 -04002870 u32 tmp;
2871 u32 rb_bufsz;
2872 int r;
2873
2874 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2875 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
2876 SOFT_RESET_PA |
2877 SOFT_RESET_SH |
2878 SOFT_RESET_VGT |
Jerome Glissea49a50d2011-08-24 20:00:17 +00002879 SOFT_RESET_SPI |
Alex Deucherfe251e22010-03-24 13:36:43 -04002880 SOFT_RESET_SX));
2881 RREG32(GRBM_SOFT_RESET);
2882 mdelay(15);
2883 WREG32(GRBM_SOFT_RESET, 0);
2884 RREG32(GRBM_SOFT_RESET);
2885
2886 /* Set ring buffer size */
Christian Könige32eb502011-10-23 12:56:27 +02002887 rb_bufsz = drm_order(ring->ring_size / 8);
Alex Deucher724c80e2010-08-27 18:25:25 -04002888 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
Alex Deucherfe251e22010-03-24 13:36:43 -04002889#ifdef __BIG_ENDIAN
2890 tmp |= BUF_SWAP_32BIT;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002891#endif
Alex Deucherfe251e22010-03-24 13:36:43 -04002892 WREG32(CP_RB_CNTL, tmp);
Christian König15d33322011-09-15 19:02:22 +02002893 WREG32(CP_SEM_WAIT_TIMER, 0x0);
Alex Deucher11ef3f1f2012-01-20 14:47:43 -05002894 WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
Alex Deucherfe251e22010-03-24 13:36:43 -04002895
2896 /* Set the write pointer delay */
2897 WREG32(CP_RB_WPTR_DELAY, 0);
2898
2899 /* Initialize the ring buffer's read and write pointers */
2900 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2901 WREG32(CP_RB_RPTR_WR, 0);
Christian Könige32eb502011-10-23 12:56:27 +02002902 ring->wptr = 0;
2903 WREG32(CP_RB_WPTR, ring->wptr);
Alex Deucher724c80e2010-08-27 18:25:25 -04002904
Adam Buchbinder48fc7f72012-09-19 21:48:00 -04002905 /* set the wb address whether it's enabled or not */
Alex Deucher0f234f5f2011-02-13 19:06:33 -05002906 WREG32(CP_RB_RPTR_ADDR,
Alex Deucher0f234f5f2011-02-13 19:06:33 -05002907 ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
Alex Deucher724c80e2010-08-27 18:25:25 -04002908 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2909 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2910
2911 if (rdev->wb.enabled)
2912 WREG32(SCRATCH_UMSK, 0xff);
2913 else {
2914 tmp |= RB_NO_UPDATE;
2915 WREG32(SCRATCH_UMSK, 0);
2916 }
2917
Alex Deucherfe251e22010-03-24 13:36:43 -04002918 mdelay(1);
2919 WREG32(CP_RB_CNTL, tmp);
2920
Christian Könige32eb502011-10-23 12:56:27 +02002921 WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
Alex Deucherfe251e22010-03-24 13:36:43 -04002922 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2923
Christian Könige32eb502011-10-23 12:56:27 +02002924 ring->rptr = RREG32(CP_RB_RPTR);
Alex Deucherfe251e22010-03-24 13:36:43 -04002925
Alex Deucher7e7b41d2010-09-02 21:32:32 -04002926 evergreen_cp_start(rdev);
Christian Könige32eb502011-10-23 12:56:27 +02002927 ring->ready = true;
Alex Deucherf7128122012-02-23 17:53:45 -05002928 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
Alex Deucherfe251e22010-03-24 13:36:43 -04002929 if (r) {
Christian Könige32eb502011-10-23 12:56:27 +02002930 ring->ready = false;
Alex Deucherfe251e22010-03-24 13:36:43 -04002931 return r;
2932 }
2933 return 0;
2934}
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002935
2936/*
2937 * Core functions
2938 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002939static void evergreen_gpu_init(struct radeon_device *rdev)
2940{
Alex Deucher416a2bd2012-05-31 19:00:25 -04002941 u32 gb_addr_config;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002942 u32 mc_shared_chmap, mc_arb_ramcfg;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002943 u32 sx_debug_1;
2944 u32 smx_dc_ctl0;
2945 u32 sq_config;
2946 u32 sq_lds_resource_mgmt;
2947 u32 sq_gpr_resource_mgmt_1;
2948 u32 sq_gpr_resource_mgmt_2;
2949 u32 sq_gpr_resource_mgmt_3;
2950 u32 sq_thread_resource_mgmt;
2951 u32 sq_thread_resource_mgmt_2;
2952 u32 sq_stack_resource_mgmt_1;
2953 u32 sq_stack_resource_mgmt_2;
2954 u32 sq_stack_resource_mgmt_3;
2955 u32 vgt_cache_invalidation;
Alex Deucherf25a5c62011-05-19 11:07:57 -04002956 u32 hdp_host_path_cntl, tmp;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002957 u32 disabled_rb_mask;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002958 int i, j, num_shader_engines, ps_thread_count;
2959
2960 switch (rdev->family) {
2961 case CHIP_CYPRESS:
2962 case CHIP_HEMLOCK:
2963 rdev->config.evergreen.num_ses = 2;
2964 rdev->config.evergreen.max_pipes = 4;
2965 rdev->config.evergreen.max_tile_pipes = 8;
2966 rdev->config.evergreen.max_simds = 10;
2967 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2968 rdev->config.evergreen.max_gprs = 256;
2969 rdev->config.evergreen.max_threads = 248;
2970 rdev->config.evergreen.max_gs_threads = 32;
2971 rdev->config.evergreen.max_stack_entries = 512;
2972 rdev->config.evergreen.sx_num_of_sets = 4;
2973 rdev->config.evergreen.sx_max_export_size = 256;
2974 rdev->config.evergreen.sx_max_export_pos_size = 64;
2975 rdev->config.evergreen.sx_max_export_smx_size = 192;
2976 rdev->config.evergreen.max_hw_contexts = 8;
2977 rdev->config.evergreen.sq_num_cf_insts = 2;
2978
2979 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2980 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2981 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002982 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002983 break;
2984 case CHIP_JUNIPER:
2985 rdev->config.evergreen.num_ses = 1;
2986 rdev->config.evergreen.max_pipes = 4;
2987 rdev->config.evergreen.max_tile_pipes = 4;
2988 rdev->config.evergreen.max_simds = 10;
2989 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2990 rdev->config.evergreen.max_gprs = 256;
2991 rdev->config.evergreen.max_threads = 248;
2992 rdev->config.evergreen.max_gs_threads = 32;
2993 rdev->config.evergreen.max_stack_entries = 512;
2994 rdev->config.evergreen.sx_num_of_sets = 4;
2995 rdev->config.evergreen.sx_max_export_size = 256;
2996 rdev->config.evergreen.sx_max_export_pos_size = 64;
2997 rdev->config.evergreen.sx_max_export_smx_size = 192;
2998 rdev->config.evergreen.max_hw_contexts = 8;
2999 rdev->config.evergreen.sq_num_cf_insts = 2;
3000
3001 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3002 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3003 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003004 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003005 break;
3006 case CHIP_REDWOOD:
3007 rdev->config.evergreen.num_ses = 1;
3008 rdev->config.evergreen.max_pipes = 4;
3009 rdev->config.evergreen.max_tile_pipes = 4;
3010 rdev->config.evergreen.max_simds = 5;
3011 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3012 rdev->config.evergreen.max_gprs = 256;
3013 rdev->config.evergreen.max_threads = 248;
3014 rdev->config.evergreen.max_gs_threads = 32;
3015 rdev->config.evergreen.max_stack_entries = 256;
3016 rdev->config.evergreen.sx_num_of_sets = 4;
3017 rdev->config.evergreen.sx_max_export_size = 256;
3018 rdev->config.evergreen.sx_max_export_pos_size = 64;
3019 rdev->config.evergreen.sx_max_export_smx_size = 192;
3020 rdev->config.evergreen.max_hw_contexts = 8;
3021 rdev->config.evergreen.sq_num_cf_insts = 2;
3022
3023 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3024 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3025 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003026 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003027 break;
3028 case CHIP_CEDAR:
3029 default:
3030 rdev->config.evergreen.num_ses = 1;
3031 rdev->config.evergreen.max_pipes = 2;
3032 rdev->config.evergreen.max_tile_pipes = 2;
3033 rdev->config.evergreen.max_simds = 2;
3034 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3035 rdev->config.evergreen.max_gprs = 256;
3036 rdev->config.evergreen.max_threads = 192;
3037 rdev->config.evergreen.max_gs_threads = 16;
3038 rdev->config.evergreen.max_stack_entries = 256;
3039 rdev->config.evergreen.sx_num_of_sets = 4;
3040 rdev->config.evergreen.sx_max_export_size = 128;
3041 rdev->config.evergreen.sx_max_export_pos_size = 32;
3042 rdev->config.evergreen.sx_max_export_smx_size = 96;
3043 rdev->config.evergreen.max_hw_contexts = 4;
3044 rdev->config.evergreen.sq_num_cf_insts = 1;
3045
3046 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3047 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3048 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003049 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003050 break;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003051 case CHIP_PALM:
3052 rdev->config.evergreen.num_ses = 1;
3053 rdev->config.evergreen.max_pipes = 2;
3054 rdev->config.evergreen.max_tile_pipes = 2;
3055 rdev->config.evergreen.max_simds = 2;
3056 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3057 rdev->config.evergreen.max_gprs = 256;
3058 rdev->config.evergreen.max_threads = 192;
3059 rdev->config.evergreen.max_gs_threads = 16;
3060 rdev->config.evergreen.max_stack_entries = 256;
3061 rdev->config.evergreen.sx_num_of_sets = 4;
3062 rdev->config.evergreen.sx_max_export_size = 128;
3063 rdev->config.evergreen.sx_max_export_pos_size = 32;
3064 rdev->config.evergreen.sx_max_export_smx_size = 96;
3065 rdev->config.evergreen.max_hw_contexts = 4;
3066 rdev->config.evergreen.sq_num_cf_insts = 1;
3067
3068 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3069 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3070 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003071 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003072 break;
Alex Deucherd5c5a722011-05-31 15:42:48 -04003073 case CHIP_SUMO:
3074 rdev->config.evergreen.num_ses = 1;
3075 rdev->config.evergreen.max_pipes = 4;
Jerome Glissebd25f072012-12-11 11:56:52 -05003076 rdev->config.evergreen.max_tile_pipes = 4;
Alex Deucherd5c5a722011-05-31 15:42:48 -04003077 if (rdev->pdev->device == 0x9648)
3078 rdev->config.evergreen.max_simds = 3;
3079 else if ((rdev->pdev->device == 0x9647) ||
3080 (rdev->pdev->device == 0x964a))
3081 rdev->config.evergreen.max_simds = 4;
3082 else
3083 rdev->config.evergreen.max_simds = 5;
3084 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3085 rdev->config.evergreen.max_gprs = 256;
3086 rdev->config.evergreen.max_threads = 248;
3087 rdev->config.evergreen.max_gs_threads = 32;
3088 rdev->config.evergreen.max_stack_entries = 256;
3089 rdev->config.evergreen.sx_num_of_sets = 4;
3090 rdev->config.evergreen.sx_max_export_size = 256;
3091 rdev->config.evergreen.sx_max_export_pos_size = 64;
3092 rdev->config.evergreen.sx_max_export_smx_size = 192;
3093 rdev->config.evergreen.max_hw_contexts = 8;
3094 rdev->config.evergreen.sq_num_cf_insts = 2;
3095
3096 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3097 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3098 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Jerome Glissebd25f072012-12-11 11:56:52 -05003099 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5c5a722011-05-31 15:42:48 -04003100 break;
3101 case CHIP_SUMO2:
3102 rdev->config.evergreen.num_ses = 1;
3103 rdev->config.evergreen.max_pipes = 4;
3104 rdev->config.evergreen.max_tile_pipes = 4;
3105 rdev->config.evergreen.max_simds = 2;
3106 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3107 rdev->config.evergreen.max_gprs = 256;
3108 rdev->config.evergreen.max_threads = 248;
3109 rdev->config.evergreen.max_gs_threads = 32;
3110 rdev->config.evergreen.max_stack_entries = 512;
3111 rdev->config.evergreen.sx_num_of_sets = 4;
3112 rdev->config.evergreen.sx_max_export_size = 256;
3113 rdev->config.evergreen.sx_max_export_pos_size = 64;
3114 rdev->config.evergreen.sx_max_export_smx_size = 192;
3115 rdev->config.evergreen.max_hw_contexts = 8;
3116 rdev->config.evergreen.sq_num_cf_insts = 2;
3117
3118 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3119 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3120 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Jerome Glissebd25f072012-12-11 11:56:52 -05003121 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5c5a722011-05-31 15:42:48 -04003122 break;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003123 case CHIP_BARTS:
3124 rdev->config.evergreen.num_ses = 2;
3125 rdev->config.evergreen.max_pipes = 4;
3126 rdev->config.evergreen.max_tile_pipes = 8;
3127 rdev->config.evergreen.max_simds = 7;
3128 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3129 rdev->config.evergreen.max_gprs = 256;
3130 rdev->config.evergreen.max_threads = 248;
3131 rdev->config.evergreen.max_gs_threads = 32;
3132 rdev->config.evergreen.max_stack_entries = 512;
3133 rdev->config.evergreen.sx_num_of_sets = 4;
3134 rdev->config.evergreen.sx_max_export_size = 256;
3135 rdev->config.evergreen.sx_max_export_pos_size = 64;
3136 rdev->config.evergreen.sx_max_export_smx_size = 192;
3137 rdev->config.evergreen.max_hw_contexts = 8;
3138 rdev->config.evergreen.sq_num_cf_insts = 2;
3139
3140 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3141 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3142 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003143 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003144 break;
3145 case CHIP_TURKS:
3146 rdev->config.evergreen.num_ses = 1;
3147 rdev->config.evergreen.max_pipes = 4;
3148 rdev->config.evergreen.max_tile_pipes = 4;
3149 rdev->config.evergreen.max_simds = 6;
3150 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3151 rdev->config.evergreen.max_gprs = 256;
3152 rdev->config.evergreen.max_threads = 248;
3153 rdev->config.evergreen.max_gs_threads = 32;
3154 rdev->config.evergreen.max_stack_entries = 256;
3155 rdev->config.evergreen.sx_num_of_sets = 4;
3156 rdev->config.evergreen.sx_max_export_size = 256;
3157 rdev->config.evergreen.sx_max_export_pos_size = 64;
3158 rdev->config.evergreen.sx_max_export_smx_size = 192;
3159 rdev->config.evergreen.max_hw_contexts = 8;
3160 rdev->config.evergreen.sq_num_cf_insts = 2;
3161
3162 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3163 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3164 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003165 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003166 break;
3167 case CHIP_CAICOS:
3168 rdev->config.evergreen.num_ses = 1;
Jerome Glissebd25f072012-12-11 11:56:52 -05003169 rdev->config.evergreen.max_pipes = 2;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003170 rdev->config.evergreen.max_tile_pipes = 2;
3171 rdev->config.evergreen.max_simds = 2;
3172 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3173 rdev->config.evergreen.max_gprs = 256;
3174 rdev->config.evergreen.max_threads = 192;
3175 rdev->config.evergreen.max_gs_threads = 16;
3176 rdev->config.evergreen.max_stack_entries = 256;
3177 rdev->config.evergreen.sx_num_of_sets = 4;
3178 rdev->config.evergreen.sx_max_export_size = 128;
3179 rdev->config.evergreen.sx_max_export_pos_size = 32;
3180 rdev->config.evergreen.sx_max_export_smx_size = 96;
3181 rdev->config.evergreen.max_hw_contexts = 4;
3182 rdev->config.evergreen.sq_num_cf_insts = 1;
3183
3184 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3185 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3186 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003187 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003188 break;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003189 }
3190
3191 /* Initialize HDP */
3192 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3193 WREG32((0x2c14 + j), 0x00000000);
3194 WREG32((0x2c18 + j), 0x00000000);
3195 WREG32((0x2c1c + j), 0x00000000);
3196 WREG32((0x2c20 + j), 0x00000000);
3197 WREG32((0x2c24 + j), 0x00000000);
3198 }
3199
3200 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3201
Alex Deucherd054ac12011-09-01 17:46:15 +00003202 evergreen_fix_pci_max_read_req_size(rdev);
3203
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003204 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
Alex Deucher05b3ef62012-03-20 17:18:37 -04003205 if ((rdev->family == CHIP_PALM) ||
3206 (rdev->family == CHIP_SUMO) ||
3207 (rdev->family == CHIP_SUMO2))
Alex Deucherd9282fc2011-05-11 03:15:24 -04003208 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3209 else
3210 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003211
Alex Deucher1aa52bd2010-11-17 12:11:03 -05003212 /* setup tiling info dword. gb_addr_config is not adequate since it does
3213 * not have bank info, so create a custom tiling dword.
3214 * bits 3:0 num_pipes
3215 * bits 7:4 num_banks
3216 * bits 11:8 group_size
3217 * bits 15:12 row_size
3218 */
3219 rdev->config.evergreen.tile_config = 0;
3220 switch (rdev->config.evergreen.max_tile_pipes) {
3221 case 1:
3222 default:
3223 rdev->config.evergreen.tile_config |= (0 << 0);
3224 break;
3225 case 2:
3226 rdev->config.evergreen.tile_config |= (1 << 0);
3227 break;
3228 case 4:
3229 rdev->config.evergreen.tile_config |= (2 << 0);
3230 break;
3231 case 8:
3232 rdev->config.evergreen.tile_config |= (3 << 0);
3233 break;
3234 }
Alex Deucherd698a342011-06-23 00:49:29 -04003235 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
Alex Deucher5bfa4872011-05-20 12:35:22 -04003236 if (rdev->flags & RADEON_IS_IGP)
Alex Deucherd698a342011-06-23 00:49:29 -04003237 rdev->config.evergreen.tile_config |= 1 << 4;
Alex Deucher29d65402012-05-31 18:53:36 -04003238 else {
Alex Deucherc8d15ed2012-07-31 11:01:10 -04003239 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3240 case 0: /* four banks */
Alex Deucher29d65402012-05-31 18:53:36 -04003241 rdev->config.evergreen.tile_config |= 0 << 4;
Alex Deucherc8d15ed2012-07-31 11:01:10 -04003242 break;
3243 case 1: /* eight banks */
3244 rdev->config.evergreen.tile_config |= 1 << 4;
3245 break;
3246 case 2: /* sixteen banks */
3247 default:
3248 rdev->config.evergreen.tile_config |= 2 << 4;
3249 break;
3250 }
Alex Deucher29d65402012-05-31 18:53:36 -04003251 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003252 rdev->config.evergreen.tile_config |= 0 << 8;
Alex Deucher1aa52bd2010-11-17 12:11:03 -05003253 rdev->config.evergreen.tile_config |=
3254 ((gb_addr_config & 0x30000000) >> 28) << 12;
3255
Alex Deucher416a2bd2012-05-31 19:00:25 -04003256 num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3257
3258 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3259 u32 efuse_straps_4;
3260 u32 efuse_straps_3;
3261
Alex Deucherff82bbc2013-04-12 11:27:20 -04003262 efuse_straps_4 = RREG32_RCU(0x204);
3263 efuse_straps_3 = RREG32_RCU(0x203);
Alex Deucher416a2bd2012-05-31 19:00:25 -04003264 tmp = (((efuse_straps_4 & 0xf) << 4) |
3265 ((efuse_straps_3 & 0xf0000000) >> 28));
3266 } else {
3267 tmp = 0;
3268 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3269 u32 rb_disable_bitmap;
3270
3271 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3272 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3273 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3274 tmp <<= 4;
3275 tmp |= rb_disable_bitmap;
3276 }
3277 }
3278 /* enabled rb are just the one not disabled :) */
3279 disabled_rb_mask = tmp;
Alex Deuchercedb6552013-04-09 10:13:22 -04003280 tmp = 0;
3281 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3282 tmp |= (1 << i);
3283 /* if all the backends are disabled, fix it up here */
3284 if ((disabled_rb_mask & tmp) == tmp) {
3285 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3286 disabled_rb_mask &= ~(1 << i);
3287 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003288
3289 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3290 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3291
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003292 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3293 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3294 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
Alex Deucher233d1ad2012-12-04 15:25:59 -05003295 WREG32(DMA_TILING_CONFIG, gb_addr_config);
Christian König9a210592013-04-08 12:41:37 +02003296 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3297 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3298 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003299
Alex Deucherf7eb9732013-01-30 13:57:40 -05003300 if ((rdev->config.evergreen.max_backends == 1) &&
3301 (rdev->flags & RADEON_IS_IGP)) {
3302 if ((disabled_rb_mask & 3) == 1) {
3303 /* RB0 disabled, RB1 enabled */
3304 tmp = 0x11111111;
3305 } else {
3306 /* RB1 disabled, RB0 enabled */
3307 tmp = 0x00000000;
3308 }
3309 } else {
3310 tmp = gb_addr_config & NUM_PIPES_MASK;
3311 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3312 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3313 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003314 WREG32(GB_BACKEND_MAP, tmp);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003315
3316 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3317 WREG32(CGTS_TCC_DISABLE, 0);
3318 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3319 WREG32(CGTS_USER_TCC_DISABLE, 0);
3320
3321 /* set HW defaults for 3D engine */
3322 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3323 ROQ_IB2_START(0x2b)));
3324
3325 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3326
3327 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3328 SYNC_GRADIENT |
3329 SYNC_WALKER |
3330 SYNC_ALIGNER));
3331
3332 sx_debug_1 = RREG32(SX_DEBUG_1);
3333 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3334 WREG32(SX_DEBUG_1, sx_debug_1);
3335
3336
3337 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3338 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3339 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3340 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3341
Alex Deucherb866d132012-06-14 22:06:36 +02003342 if (rdev->family <= CHIP_SUMO2)
3343 WREG32(SMX_SAR_CTL0, 0x00010000);
3344
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003345 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3346 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3347 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3348
3349 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3350 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3351 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3352
3353 WREG32(VGT_NUM_INSTANCES, 1);
3354 WREG32(SPI_CONFIG_CNTL, 0);
3355 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3356 WREG32(CP_PERFMON_CNTL, 0);
3357
3358 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3359 FETCH_FIFO_HIWATER(0x4) |
3360 DONE_FIFO_HIWATER(0xe0) |
3361 ALU_UPDATE_FIFO_HIWATER(0x8)));
3362
3363 sq_config = RREG32(SQ_CONFIG);
3364 sq_config &= ~(PS_PRIO(3) |
3365 VS_PRIO(3) |
3366 GS_PRIO(3) |
3367 ES_PRIO(3));
3368 sq_config |= (VC_ENABLE |
3369 EXPORT_SRC_C |
3370 PS_PRIO(0) |
3371 VS_PRIO(1) |
3372 GS_PRIO(2) |
3373 ES_PRIO(3));
3374
Alex Deucherd5e455e2010-11-22 17:56:29 -05003375 switch (rdev->family) {
3376 case CHIP_CEDAR:
3377 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003378 case CHIP_SUMO:
3379 case CHIP_SUMO2:
Alex Deucheradb68fa2011-01-06 21:19:24 -05003380 case CHIP_CAICOS:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003381 /* no vertex cache */
3382 sq_config &= ~VC_ENABLE;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003383 break;
3384 default:
3385 break;
3386 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003387
3388 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3389
3390 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3391 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3392 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3393 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3394 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3395 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3396 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3397
Alex Deucherd5e455e2010-11-22 17:56:29 -05003398 switch (rdev->family) {
3399 case CHIP_CEDAR:
3400 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003401 case CHIP_SUMO:
3402 case CHIP_SUMO2:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003403 ps_thread_count = 96;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003404 break;
3405 default:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003406 ps_thread_count = 128;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003407 break;
3408 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003409
3410 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
Alex Deucherf96b35c2010-06-16 12:24:07 -04003411 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3412 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3413 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3414 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3415 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003416
3417 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3418 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3419 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3420 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3421 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3422 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3423
3424 WREG32(SQ_CONFIG, sq_config);
3425 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3426 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3427 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3428 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3429 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3430 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3431 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3432 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3433 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3434 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3435
3436 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3437 FORCE_EOV_MAX_REZ_CNT(255)));
3438
Alex Deucherd5e455e2010-11-22 17:56:29 -05003439 switch (rdev->family) {
3440 case CHIP_CEDAR:
3441 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003442 case CHIP_SUMO:
3443 case CHIP_SUMO2:
Alex Deucheradb68fa2011-01-06 21:19:24 -05003444 case CHIP_CAICOS:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003445 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
Alex Deucherd5e455e2010-11-22 17:56:29 -05003446 break;
3447 default:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003448 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
Alex Deucherd5e455e2010-11-22 17:56:29 -05003449 break;
3450 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003451 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3452 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3453
3454 WREG32(VGT_GS_VERTEX_REUSE, 16);
Alex Deucher12920592011-02-02 12:37:40 -05003455 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003456 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3457
Alex Deucher60a4a3e2010-06-29 17:03:35 -04003458 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3459 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3460
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003461 WREG32(CB_PERF_CTR0_SEL_0, 0);
3462 WREG32(CB_PERF_CTR0_SEL_1, 0);
3463 WREG32(CB_PERF_CTR1_SEL_0, 0);
3464 WREG32(CB_PERF_CTR1_SEL_1, 0);
3465 WREG32(CB_PERF_CTR2_SEL_0, 0);
3466 WREG32(CB_PERF_CTR2_SEL_1, 0);
3467 WREG32(CB_PERF_CTR3_SEL_0, 0);
3468 WREG32(CB_PERF_CTR3_SEL_1, 0);
3469
Alex Deucher60a4a3e2010-06-29 17:03:35 -04003470 /* clear render buffer base addresses */
3471 WREG32(CB_COLOR0_BASE, 0);
3472 WREG32(CB_COLOR1_BASE, 0);
3473 WREG32(CB_COLOR2_BASE, 0);
3474 WREG32(CB_COLOR3_BASE, 0);
3475 WREG32(CB_COLOR4_BASE, 0);
3476 WREG32(CB_COLOR5_BASE, 0);
3477 WREG32(CB_COLOR6_BASE, 0);
3478 WREG32(CB_COLOR7_BASE, 0);
3479 WREG32(CB_COLOR8_BASE, 0);
3480 WREG32(CB_COLOR9_BASE, 0);
3481 WREG32(CB_COLOR10_BASE, 0);
3482 WREG32(CB_COLOR11_BASE, 0);
3483
3484 /* set the shader const cache sizes to 0 */
3485 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3486 WREG32(i, 0);
3487 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3488 WREG32(i, 0);
3489
Alex Deucherf25a5c62011-05-19 11:07:57 -04003490 tmp = RREG32(HDP_MISC_CNTL);
3491 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3492 WREG32(HDP_MISC_CNTL, tmp);
3493
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003494 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3495 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3496
3497 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3498
3499 udelay(50);
3500
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003501}
3502
/**
 * evergreen_mc_init - probe and program the memory controller parameters
 *
 * @rdev: radeon_device pointer
 *
 * Reads the memory configuration registers to work out the VRAM channel
 * size and channel count (hence bus width), the PCI aperture, and the
 * VRAM size, then programs the GPU memory layout via
 * r700_vram_gtt_location() and refreshes the bandwidth info.
 * Returns 0 (cannot fail).
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* fusion parts (PALM/SUMO/SUMO2) expose RAMCFG at a different offset */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* per-channel width in bits */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* number of memory channels: encoded as 0/1/2/3 -> 1/2/4/8 */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
Jerome Glissed594e462010-02-17 21:54:29 +00003561
/**
 * evergreen_print_gpu_status_regs - dump GPU engine status registers
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM/CP/DMA status registers via dev_info; called before
 * and after a GPU soft reset to aid lockup debugging.  On cayman and
 * newer the second DMA engine's status register (offset +0x800) is
 * dumped as well.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman/TN have a second DMA engine at a 0x800 offset */
		dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3589
Alex Deucher168757e2013-01-18 19:17:22 -05003590bool evergreen_is_display_hung(struct radeon_device *rdev)
Alex Deuchera65a4362013-01-18 18:55:54 -05003591{
3592 u32 crtc_hung = 0;
3593 u32 crtc_status[6];
3594 u32 i, j, tmp;
3595
3596 for (i = 0; i < rdev->num_crtc; i++) {
3597 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3598 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3599 crtc_hung |= (1 << i);
3600 }
3601 }
3602
3603 for (j = 0; j < 10; j++) {
3604 for (i = 0; i < rdev->num_crtc; i++) {
3605 if (crtc_hung & (1 << i)) {
3606 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3607 if (tmp != crtc_status[i])
3608 crtc_hung &= ~(1 << i);
3609 }
3610 }
3611 if (crtc_hung == 0)
3612 return false;
3613 udelay(100);
3614 }
3615
3616 return true;
3617}
3618
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks need a reset
 *
 * @rdev: radeon_device pointer
 *
 * Reads the GRBM/SRBM/DMA/VM status registers and builds a mask of
 * RADEON_RESET_* flags for every block that reports busy or pending
 * requests.  A zero return means no reset is required.  A busy MC is
 * deliberately dropped from the mask at the end, since MC activity
 * usually indicates "busy", not "hung".
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3687
/**
 * evergreen_gpu_soft_reset - soft reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting blocks to reset
 *
 * Halts the CP (and the DMA engine, if selected), stops the memory
 * controller, then pulses the GRBM and SRBM soft-reset bits that
 * correspond to the requested blocks, and finally restores the memory
 * controller.  The register ordering and udelay()s form the hardware
 * reset sequence and must not be reordered.  No-op if @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop the MC and save its state so it can be restored after reset */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the request mask into GRBM soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* translate the request mask into SRBM soft-reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* never soft-reset the MC on IGP parts */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* NOTE(review): readback presumably posts the write — confirm */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		/* release the reset bits again */
		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		/* release the reset bits again */
		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3801
Jerome Glissea2d07b72010-03-09 14:45:11 +00003802int evergreen_asic_reset(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003803{
Alex Deuchera65a4362013-01-18 18:55:54 -05003804 u32 reset_mask;
3805
3806 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3807
3808 if (reset_mask)
3809 r600_set_bios_scratch_engine_hung(rdev, true);
3810
3811 evergreen_gpu_soft_reset(rdev, reset_mask);
3812
3813 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3814
3815 if (!reset_mask)
3816 r600_set_bios_scratch_engine_hung(rdev, false);
3817
3818 return 0;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003819}
3820
Alex Deucher123bc182013-01-24 11:37:19 -05003821/**
3822 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3823 *
3824 * @rdev: radeon_device pointer
3825 * @ring: radeon_ring structure holding ring information
3826 *
3827 * Check if the GFX engine is locked up.
3828 * Returns true if the engine appears to be locked up, false if not.
3829 */
3830bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3831{
3832 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3833
3834 if (!(reset_mask & (RADEON_RESET_GFX |
3835 RADEON_RESET_COMPUTE |
3836 RADEON_RESET_CP))) {
3837 radeon_ring_lockup_update(ring);
3838 return false;
3839 }
3840 /* force CP activities */
3841 radeon_ring_force_activity(rdev, ring);
3842 return radeon_ring_test_lockup(rdev, ring);
3843}
3844
Alex Deucher2948f5e2013-04-12 13:52:52 -04003845/*
3846 * RLC
3847 */
3848#define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
3849#define RLC_CLEAR_STATE_END_MARKER 0x00000001
3850
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins, unreserves and frees the RLC save/restore, clear-state and
 * CP-table buffer objects (each only if it was allocated) and NULLs
 * the pointers so a later re-init starts from a clean slate.  A failed
 * reserve is only warned about; the teardown proceeds regardless.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block (comment previously copy-pasted "clear state block") */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
3891
Alex Deucher22c775c2013-07-23 09:41:05 -04003892#define CP_ME_TABLE_SIZE 96
3893
Alex Deucher2948f5e2013-04-12 13:52:52 -04003894int sumo_rlc_init(struct radeon_device *rdev)
3895{
Alex Deucher1fd11772013-04-17 17:53:50 -04003896 const u32 *src_ptr;
Alex Deucher2948f5e2013-04-12 13:52:52 -04003897 volatile u32 *dst_ptr;
3898 u32 dws, data, i, j, k, reg_num;
Alex Deucher59a82d02013-08-13 12:48:06 -04003899 u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
Alex Deucher2948f5e2013-04-12 13:52:52 -04003900 u64 reg_list_mc_addr;
Alex Deucher1fd11772013-04-17 17:53:50 -04003901 const struct cs_section_def *cs_data;
Alex Deucher2948f5e2013-04-12 13:52:52 -04003902 int r;
3903
3904 src_ptr = rdev->rlc.reg_list;
3905 dws = rdev->rlc.reg_list_size;
3906 cs_data = rdev->rlc.cs_data;
3907
Alex Deucher10b7ca72013-04-17 17:22:05 -04003908 if (src_ptr) {
3909 /* save restore block */
3910 if (rdev->rlc.save_restore_obj == NULL) {
3911 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
3912 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
3913 if (r) {
3914 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
3915 return r;
3916 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04003917 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04003918
Alex Deucher10b7ca72013-04-17 17:22:05 -04003919 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3920 if (unlikely(r != 0)) {
Alex Deucher2948f5e2013-04-12 13:52:52 -04003921 sumo_rlc_fini(rdev);
3922 return r;
3923 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04003924 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
3925 &rdev->rlc.save_restore_gpu_addr);
3926 if (r) {
3927 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3928 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
3929 sumo_rlc_fini(rdev);
3930 return r;
Alex Deucher2948f5e2013-04-12 13:52:52 -04003931 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04003932
Alex Deucher10b7ca72013-04-17 17:22:05 -04003933 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
3934 if (r) {
3935 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
3936 sumo_rlc_fini(rdev);
3937 return r;
3938 }
3939 /* write the sr buffer */
3940 dst_ptr = rdev->rlc.sr_ptr;
Alex Deucher1fd11772013-04-17 17:53:50 -04003941 if (rdev->family >= CHIP_TAHITI) {
3942 /* SI */
Alex Deucher59a82d02013-08-13 12:48:06 -04003943 for (i = 0; i < rdev->rlc.reg_list_size; i++)
Alex Deucher1fd11772013-04-17 17:53:50 -04003944 dst_ptr[i] = src_ptr[i];
3945 } else {
3946 /* ON/LN/TN */
3947 /* format:
3948 * dw0: (reg2 << 16) | reg1
3949 * dw1: reg1 save space
3950 * dw2: reg2 save space
3951 */
3952 for (i = 0; i < dws; i++) {
3953 data = src_ptr[i] >> 2;
3954 i++;
3955 if (i < dws)
3956 data |= (src_ptr[i] >> 2) << 16;
3957 j = (((i - 1) * 3) / 2);
3958 dst_ptr[j] = data;
3959 }
3960 j = ((i * 3) / 2);
3961 dst_ptr[j] = RLC_SAVE_RESTORE_LIST_END_MARKER;
Alex Deucher10b7ca72013-04-17 17:22:05 -04003962 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04003963 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
3964 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3965 }
3966
3967 if (cs_data) {
3968 /* clear state block */
Alex Deucher59a82d02013-08-13 12:48:06 -04003969 if (rdev->family >= CHIP_TAHITI) {
3970 rdev->rlc.clear_state_size = si_get_csb_size(rdev);
3971 dws = rdev->rlc.clear_state_size + (256 / 4);
3972 } else {
3973 reg_list_num = 0;
3974 dws = 0;
3975 for (i = 0; cs_data[i].section != NULL; i++) {
3976 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
3977 reg_list_num++;
3978 dws += cs_data[i].section[j].reg_count;
3979 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04003980 }
Alex Deucher59a82d02013-08-13 12:48:06 -04003981 reg_list_blk_index = (3 * reg_list_num + 2);
3982 dws += reg_list_blk_index;
3983 rdev->rlc.clear_state_size = dws;
Alex Deucher10b7ca72013-04-17 17:22:05 -04003984 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04003985
3986 if (rdev->rlc.clear_state_obj == NULL) {
Alex Deucher59a82d02013-08-13 12:48:06 -04003987 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
Alex Deucher10b7ca72013-04-17 17:22:05 -04003988 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
3989 if (r) {
3990 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
3991 sumo_rlc_fini(rdev);
3992 return r;
3993 }
3994 }
3995 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3996 if (unlikely(r != 0)) {
3997 sumo_rlc_fini(rdev);
3998 return r;
3999 }
4000 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4001 &rdev->rlc.clear_state_gpu_addr);
4002 if (r) {
4003 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4004 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4005 sumo_rlc_fini(rdev);
4006 return r;
4007 }
4008
4009 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4010 if (r) {
4011 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4012 sumo_rlc_fini(rdev);
4013 return r;
4014 }
4015 /* set up the cs buffer */
4016 dst_ptr = rdev->rlc.cs_ptr;
Alex Deucher59a82d02013-08-13 12:48:06 -04004017 if (rdev->family >= CHIP_TAHITI) {
4018 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4019 dst_ptr[0] = upper_32_bits(reg_list_mc_addr);
4020 dst_ptr[1] = lower_32_bits(reg_list_mc_addr);
4021 dst_ptr[2] = rdev->rlc.clear_state_size;
4022 si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4023 } else {
4024 reg_list_hdr_blk_index = 0;
4025 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4026 data = upper_32_bits(reg_list_mc_addr);
4027 dst_ptr[reg_list_hdr_blk_index] = data;
4028 reg_list_hdr_blk_index++;
4029 for (i = 0; cs_data[i].section != NULL; i++) {
4030 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4031 reg_num = cs_data[i].section[j].reg_count;
4032 data = reg_list_mc_addr & 0xffffffff;
4033 dst_ptr[reg_list_hdr_blk_index] = data;
4034 reg_list_hdr_blk_index++;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004035
Alex Deucher59a82d02013-08-13 12:48:06 -04004036 data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4037 dst_ptr[reg_list_hdr_blk_index] = data;
4038 reg_list_hdr_blk_index++;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004039
Alex Deucher59a82d02013-08-13 12:48:06 -04004040 data = 0x08000000 | (reg_num * 4);
4041 dst_ptr[reg_list_hdr_blk_index] = data;
4042 reg_list_hdr_blk_index++;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004043
Alex Deucher59a82d02013-08-13 12:48:06 -04004044 for (k = 0; k < reg_num; k++) {
4045 data = cs_data[i].section[j].extent[k];
4046 dst_ptr[reg_list_blk_index + k] = data;
4047 }
4048 reg_list_mc_addr += reg_num * 4;
4049 reg_list_blk_index += reg_num;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004050 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04004051 }
Alex Deucher59a82d02013-08-13 12:48:06 -04004052 dst_ptr[reg_list_hdr_blk_index] = RLC_CLEAR_STATE_END_MARKER;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004053 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04004054 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4055 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4056 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04004057
Alex Deucher22c775c2013-07-23 09:41:05 -04004058 if (rdev->rlc.cp_table_size) {
4059 if (rdev->rlc.cp_table_obj == NULL) {
4060 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
4061 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
4062 if (r) {
4063 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4064 sumo_rlc_fini(rdev);
4065 return r;
4066 }
4067 }
4068
4069 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4070 if (unlikely(r != 0)) {
4071 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4072 sumo_rlc_fini(rdev);
4073 return r;
4074 }
4075 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4076 &rdev->rlc.cp_table_gpu_addr);
4077 if (r) {
4078 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4079 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4080 sumo_rlc_fini(rdev);
4081 return r;
4082 }
4083 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4084 if (r) {
4085 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4086 sumo_rlc_fini(rdev);
4087 return r;
4088 }
4089
4090 cik_init_cp_pg_table(rdev);
4091
4092 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4093 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4094
4095 }
4096
Alex Deucher2948f5e2013-04-12 13:52:52 -04004097 return 0;
4098}
4099
4100static void evergreen_rlc_start(struct radeon_device *rdev)
4101{
Alex Deucher8ba10462013-02-15 16:26:33 -05004102 u32 mask = RLC_ENABLE;
4103
4104 if (rdev->flags & RADEON_IS_IGP) {
4105 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
Alex Deucher8ba10462013-02-15 16:26:33 -05004106 }
4107
4108 WREG32(RLC_CNTL, mask);
Alex Deucher2948f5e2013-04-12 13:52:52 -04004109}
4110
/**
 * evergreen_rlc_resume - load the RLC microcode and start the RLC
 * @rdev: radeon_device pointer
 *
 * Stops the RLC, performs family-specific setup (load-balancing on
 * Aruba/TN, save/restore and clear-state base addresses on IGPs,
 * host-buffer registers on discrete parts), uploads the RLC ucode
 * word by word, then restarts the RLC.
 *
 * Returns 0 on success, -EINVAL if no RLC firmware has been loaded.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	/* can't start the RLC without its firmware */
	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			/* keep SIMDs 0/1 of each shader engine always active
			 * for the RLC load balancer
			 */
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* only enable load balancing if no SIMDs are harvested */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		/* point the RLC at the save/restore and clear-state buffers
		 * set up by sumo_rlc_init() (256-byte aligned addresses)
		 */
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* upload the ucode: size depends on the chip family */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	/* reset the ucode address before starting */
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
4177
Alex Deucher45f9a392010-03-24 13:55:51 -04004178/* Interrupts */
4179
4180u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4181{
Alex Deucher46437052012-08-15 17:10:32 -04004182 if (crtc >= rdev->num_crtc)
Alex Deucher45f9a392010-03-24 13:55:51 -04004183 return 0;
Alex Deucher46437052012-08-15 17:10:32 -04004184 else
4185 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
Alex Deucher45f9a392010-03-24 13:55:51 -04004186}
4187
/**
 * evergreen_disable_interrupt_state - mask all interrupt sources
 * @rdev: radeon_device pointer
 *
 * Forces every interrupt source (CP rings, DMA engines, GRBM, CRTC
 * vblank/pageflip, DAC auto-detect, HPD) into the disabled state.
 * Used before enabling a new interrupt configuration and on teardown.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* Cayman+ has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* mask vblank/vline interrupts on every present CRTC */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* mask pageflip interrupts on every present CRTC */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE6 */
	if (!ASIC_IS_DCE6(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* disable HPD interrupts but preserve the polarity setting */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4245
/**
 * evergreen_irq_set - program the interrupt enable state
 * @rdev: radeon_device pointer
 *
 * Builds the enable masks for all interrupt sources (CP rings, DMA,
 * vblank, pageflip, HPD, HDMI/audio, thermal) from the software state
 * in rdev->irq, then writes them to the hardware in one pass.
 *
 * Returns 0 on success, -EINVAL if no IRQ handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read current register values with the enable bits cleared;
	 * the enables are OR'd back in below based on rdev->irq state
	 */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	/* the thermal interrupt control register differs on Aruba/TN */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	/* second DMA engine exists on Cayman+ only */
	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* enable vblank on a crtc when either a vblank client or a
	 * pending pageflip needs it
	 */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	/* hotplug detect enables */
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	/* HDMI/audio format-change enables */
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* now commit all the computed masks to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
4467
Andi Kleencbdd4502011-10-13 16:08:46 -07004468static void evergreen_irq_ack(struct radeon_device *rdev)
Alex Deucher45f9a392010-03-24 13:55:51 -04004469{
4470 u32 tmp;
4471
Alex Deucher6f34be52010-11-21 10:59:01 -05004472 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4473 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4474 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4475 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4476 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4477 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4478 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4479 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
Alex Deucherb7eff392011-07-08 11:44:56 -04004480 if (rdev->num_crtc >= 4) {
4481 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4482 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4483 }
4484 if (rdev->num_crtc >= 6) {
4485 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4486 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4487 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004488
Alex Deucherf122c612012-03-30 08:59:57 -04004489 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4490 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4491 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4492 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4493 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4494 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4495
Alex Deucher6f34be52010-11-21 10:59:01 -05004496 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4497 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4498 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4499 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
Alex Deucher6f34be52010-11-21 10:59:01 -05004500 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004501 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004502 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004503 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004504 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004505 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004506 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004507 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4508
Alex Deucherb7eff392011-07-08 11:44:56 -04004509 if (rdev->num_crtc >= 4) {
4510 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4511 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4512 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4513 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4514 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4515 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4516 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4517 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4518 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4519 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4520 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4521 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4522 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004523
Alex Deucherb7eff392011-07-08 11:44:56 -04004524 if (rdev->num_crtc >= 6) {
4525 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4526 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4527 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4528 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4529 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4530 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4531 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4532 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4533 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4534 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4535 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4536 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4537 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004538
Alex Deucher6f34be52010-11-21 10:59:01 -05004539 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004540 tmp = RREG32(DC_HPD1_INT_CONTROL);
4541 tmp |= DC_HPDx_INT_ACK;
4542 WREG32(DC_HPD1_INT_CONTROL, tmp);
4543 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004544 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004545 tmp = RREG32(DC_HPD2_INT_CONTROL);
4546 tmp |= DC_HPDx_INT_ACK;
4547 WREG32(DC_HPD2_INT_CONTROL, tmp);
4548 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004549 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004550 tmp = RREG32(DC_HPD3_INT_CONTROL);
4551 tmp |= DC_HPDx_INT_ACK;
4552 WREG32(DC_HPD3_INT_CONTROL, tmp);
4553 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004554 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004555 tmp = RREG32(DC_HPD4_INT_CONTROL);
4556 tmp |= DC_HPDx_INT_ACK;
4557 WREG32(DC_HPD4_INT_CONTROL, tmp);
4558 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004559 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004560 tmp = RREG32(DC_HPD5_INT_CONTROL);
4561 tmp |= DC_HPDx_INT_ACK;
4562 WREG32(DC_HPD5_INT_CONTROL, tmp);
4563 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004564 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004565 tmp = RREG32(DC_HPD5_INT_CONTROL);
4566 tmp |= DC_HPDx_INT_ACK;
4567 WREG32(DC_HPD6_INT_CONTROL, tmp);
4568 }
Alex Deucherf122c612012-03-30 08:59:57 -04004569 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4570 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4571 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4572 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4573 }
4574 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4575 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4576 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4577 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4578 }
4579 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4580 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4581 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4582 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4583 }
4584 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4585 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4586 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4587 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4588 }
4589 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4590 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4591 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4592 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4593 }
4594 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4595 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4596 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4597 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4598 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004599}
4600
Lauri Kasanen1109ca02012-08-31 13:43:50 -04004601static void evergreen_irq_disable(struct radeon_device *rdev)
Alex Deucher45f9a392010-03-24 13:55:51 -04004602{
Alex Deucher45f9a392010-03-24 13:55:51 -04004603 r600_disable_interrupts(rdev);
4604 /* Wait and acknowledge irq */
4605 mdelay(1);
Alex Deucher6f34be52010-11-21 10:59:01 -05004606 evergreen_irq_ack(rdev);
Alex Deucher45f9a392010-03-24 13:55:51 -04004607 evergreen_disable_interrupt_state(rdev);
4608}
4609
Alex Deucher755d8192011-03-02 20:07:34 -05004610void evergreen_irq_suspend(struct radeon_device *rdev)
Alex Deucher45f9a392010-03-24 13:55:51 -04004611{
4612 evergreen_irq_disable(rdev);
4613 r600_rlc_stop(rdev);
4614}
4615
/* Fetch the current IH (interrupt handler) ring write pointer.
 * Prefers the writeback copy in system memory when writeback is
 * enabled, which avoids a slow MMIO register read.  If the ring has
 * overflowed, resynchronize the read pointer just past the oldest
 * vector that survived and clear the overflow flag in IH_RB_CNTL.
 * Returns the write pointer masked to the ring size.
 */
static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
{
	u32 wptr, tmp;

	if (rdev->wb.enabled)
		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
	else
		wptr = RREG32(IH_RB_WPTR);

	if (wptr & RB_OVERFLOW) {
		/* When a ring buffer overflow happen start parsing interrupt
		 * from the last not overwritten vector (wptr + 16). Hopefully
		 * this should allow us to catchup.
		 */
		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
			 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
		tmp = RREG32(IH_RB_CNTL);
		tmp |= IH_WPTR_OVERFLOW_CLEAR;
		WREG32(IH_RB_CNTL, tmp);
	}
	return (wptr & rdev->ih.ptr_mask);
}
4639
4640int evergreen_irq_process(struct radeon_device *rdev)
4641{
Dave Airlie682f1a52011-06-18 03:59:51 +00004642 u32 wptr;
4643 u32 rptr;
Alex Deucher45f9a392010-03-24 13:55:51 -04004644 u32 src_id, src_data;
4645 u32 ring_index;
Alex Deucher45f9a392010-03-24 13:55:51 -04004646 bool queue_hotplug = false;
Alex Deucherf122c612012-03-30 08:59:57 -04004647 bool queue_hdmi = false;
Alex Deucherdc50ba72013-06-26 00:33:35 -04004648 bool queue_thermal = false;
Alex Deucher54e2e492013-06-13 18:26:25 -04004649 u32 status, addr;
Alex Deucher45f9a392010-03-24 13:55:51 -04004650
Dave Airlie682f1a52011-06-18 03:59:51 +00004651 if (!rdev->ih.enabled || rdev->shutdown)
Alex Deucher45f9a392010-03-24 13:55:51 -04004652 return IRQ_NONE;
4653
Dave Airlie682f1a52011-06-18 03:59:51 +00004654 wptr = evergreen_get_ih_wptr(rdev);
Christian Koenigc20dc362012-05-16 21:45:24 +02004655
4656restart_ih:
4657 /* is somebody else already processing irqs? */
4658 if (atomic_xchg(&rdev->ih.lock, 1))
4659 return IRQ_NONE;
4660
Dave Airlie682f1a52011-06-18 03:59:51 +00004661 rptr = rdev->ih.rptr;
4662 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
Alex Deucher45f9a392010-03-24 13:55:51 -04004663
Benjamin Herrenschmidt964f6642011-07-13 16:28:19 +10004664 /* Order reading of wptr vs. reading of IH ring data */
4665 rmb();
4666
Alex Deucher45f9a392010-03-24 13:55:51 -04004667 /* display interrupts */
Alex Deucher6f34be52010-11-21 10:59:01 -05004668 evergreen_irq_ack(rdev);
Alex Deucher45f9a392010-03-24 13:55:51 -04004669
Alex Deucher45f9a392010-03-24 13:55:51 -04004670 while (rptr != wptr) {
4671 /* wptr/rptr are in bytes! */
4672 ring_index = rptr / 4;
Alex Deucher0f234f5f2011-02-13 19:06:33 -05004673 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4674 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
Alex Deucher45f9a392010-03-24 13:55:51 -04004675
4676 switch (src_id) {
4677 case 1: /* D1 vblank/vline */
4678 switch (src_data) {
4679 case 0: /* D1 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004680 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
Alex Deucher6f34be52010-11-21 10:59:01 -05004681 if (rdev->irq.crtc_vblank_int[0]) {
4682 drm_handle_vblank(rdev->ddev, 0);
4683 rdev->pm.vblank_sync = true;
4684 wake_up(&rdev->irq.vblank_queue);
4685 }
Christian Koenig736fc372012-05-17 19:52:00 +02004686 if (atomic_read(&rdev->irq.pflip[0]))
Mario Kleiner3e4ea742010-11-21 10:59:02 -05004687 radeon_crtc_handle_flip(rdev, 0);
Alex Deucher6f34be52010-11-21 10:59:01 -05004688 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004689 DRM_DEBUG("IH: D1 vblank\n");
4690 }
4691 break;
4692 case 1: /* D1 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004693 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4694 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004695 DRM_DEBUG("IH: D1 vline\n");
4696 }
4697 break;
4698 default:
4699 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4700 break;
4701 }
4702 break;
4703 case 2: /* D2 vblank/vline */
4704 switch (src_data) {
4705 case 0: /* D2 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004706 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
Alex Deucher6f34be52010-11-21 10:59:01 -05004707 if (rdev->irq.crtc_vblank_int[1]) {
4708 drm_handle_vblank(rdev->ddev, 1);
4709 rdev->pm.vblank_sync = true;
4710 wake_up(&rdev->irq.vblank_queue);
4711 }
Christian Koenig736fc372012-05-17 19:52:00 +02004712 if (atomic_read(&rdev->irq.pflip[1]))
Mario Kleiner3e4ea742010-11-21 10:59:02 -05004713 radeon_crtc_handle_flip(rdev, 1);
Alex Deucher6f34be52010-11-21 10:59:01 -05004714 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004715 DRM_DEBUG("IH: D2 vblank\n");
4716 }
4717 break;
4718 case 1: /* D2 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004719 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4720 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004721 DRM_DEBUG("IH: D2 vline\n");
4722 }
4723 break;
4724 default:
4725 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4726 break;
4727 }
4728 break;
4729 case 3: /* D3 vblank/vline */
4730 switch (src_data) {
4731 case 0: /* D3 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004732 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4733 if (rdev->irq.crtc_vblank_int[2]) {
4734 drm_handle_vblank(rdev->ddev, 2);
4735 rdev->pm.vblank_sync = true;
4736 wake_up(&rdev->irq.vblank_queue);
4737 }
Christian Koenig736fc372012-05-17 19:52:00 +02004738 if (atomic_read(&rdev->irq.pflip[2]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004739 radeon_crtc_handle_flip(rdev, 2);
4740 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004741 DRM_DEBUG("IH: D3 vblank\n");
4742 }
4743 break;
4744 case 1: /* D3 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004745 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4746 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004747 DRM_DEBUG("IH: D3 vline\n");
4748 }
4749 break;
4750 default:
4751 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4752 break;
4753 }
4754 break;
4755 case 4: /* D4 vblank/vline */
4756 switch (src_data) {
4757 case 0: /* D4 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004758 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4759 if (rdev->irq.crtc_vblank_int[3]) {
4760 drm_handle_vblank(rdev->ddev, 3);
4761 rdev->pm.vblank_sync = true;
4762 wake_up(&rdev->irq.vblank_queue);
4763 }
Christian Koenig736fc372012-05-17 19:52:00 +02004764 if (atomic_read(&rdev->irq.pflip[3]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004765 radeon_crtc_handle_flip(rdev, 3);
4766 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004767 DRM_DEBUG("IH: D4 vblank\n");
4768 }
4769 break;
4770 case 1: /* D4 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004771 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4772 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004773 DRM_DEBUG("IH: D4 vline\n");
4774 }
4775 break;
4776 default:
4777 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4778 break;
4779 }
4780 break;
4781 case 5: /* D5 vblank/vline */
4782 switch (src_data) {
4783 case 0: /* D5 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004784 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4785 if (rdev->irq.crtc_vblank_int[4]) {
4786 drm_handle_vblank(rdev->ddev, 4);
4787 rdev->pm.vblank_sync = true;
4788 wake_up(&rdev->irq.vblank_queue);
4789 }
Christian Koenig736fc372012-05-17 19:52:00 +02004790 if (atomic_read(&rdev->irq.pflip[4]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004791 radeon_crtc_handle_flip(rdev, 4);
4792 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004793 DRM_DEBUG("IH: D5 vblank\n");
4794 }
4795 break;
4796 case 1: /* D5 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004797 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4798 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004799 DRM_DEBUG("IH: D5 vline\n");
4800 }
4801 break;
4802 default:
4803 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4804 break;
4805 }
4806 break;
4807 case 6: /* D6 vblank/vline */
4808 switch (src_data) {
4809 case 0: /* D6 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004810 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4811 if (rdev->irq.crtc_vblank_int[5]) {
4812 drm_handle_vblank(rdev->ddev, 5);
4813 rdev->pm.vblank_sync = true;
4814 wake_up(&rdev->irq.vblank_queue);
4815 }
Christian Koenig736fc372012-05-17 19:52:00 +02004816 if (atomic_read(&rdev->irq.pflip[5]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004817 radeon_crtc_handle_flip(rdev, 5);
4818 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004819 DRM_DEBUG("IH: D6 vblank\n");
4820 }
4821 break;
4822 case 1: /* D6 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004823 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4824 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004825 DRM_DEBUG("IH: D6 vline\n");
4826 }
4827 break;
4828 default:
4829 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4830 break;
4831 }
4832 break;
4833 case 42: /* HPD hotplug */
4834 switch (src_data) {
4835 case 0:
Alex Deucher6f34be52010-11-21 10:59:01 -05004836 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4837 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004838 queue_hotplug = true;
4839 DRM_DEBUG("IH: HPD1\n");
4840 }
4841 break;
4842 case 1:
Alex Deucher6f34be52010-11-21 10:59:01 -05004843 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4844 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004845 queue_hotplug = true;
4846 DRM_DEBUG("IH: HPD2\n");
4847 }
4848 break;
4849 case 2:
Alex Deucher6f34be52010-11-21 10:59:01 -05004850 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4851 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004852 queue_hotplug = true;
4853 DRM_DEBUG("IH: HPD3\n");
4854 }
4855 break;
4856 case 3:
Alex Deucher6f34be52010-11-21 10:59:01 -05004857 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4858 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004859 queue_hotplug = true;
4860 DRM_DEBUG("IH: HPD4\n");
4861 }
4862 break;
4863 case 4:
Alex Deucher6f34be52010-11-21 10:59:01 -05004864 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4865 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004866 queue_hotplug = true;
4867 DRM_DEBUG("IH: HPD5\n");
4868 }
4869 break;
4870 case 5:
Alex Deucher6f34be52010-11-21 10:59:01 -05004871 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4872 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004873 queue_hotplug = true;
4874 DRM_DEBUG("IH: HPD6\n");
4875 }
4876 break;
4877 default:
4878 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4879 break;
4880 }
4881 break;
Alex Deucherf122c612012-03-30 08:59:57 -04004882 case 44: /* hdmi */
4883 switch (src_data) {
4884 case 0:
4885 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4886 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4887 queue_hdmi = true;
4888 DRM_DEBUG("IH: HDMI0\n");
4889 }
4890 break;
4891 case 1:
4892 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4893 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4894 queue_hdmi = true;
4895 DRM_DEBUG("IH: HDMI1\n");
4896 }
4897 break;
4898 case 2:
4899 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4900 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4901 queue_hdmi = true;
4902 DRM_DEBUG("IH: HDMI2\n");
4903 }
4904 break;
4905 case 3:
4906 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4907 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4908 queue_hdmi = true;
4909 DRM_DEBUG("IH: HDMI3\n");
4910 }
4911 break;
4912 case 4:
4913 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4914 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4915 queue_hdmi = true;
4916 DRM_DEBUG("IH: HDMI4\n");
4917 }
4918 break;
4919 case 5:
4920 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4921 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4922 queue_hdmi = true;
4923 DRM_DEBUG("IH: HDMI5\n");
4924 }
4925 break;
4926 default:
4927 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4928 break;
4929 }
Christian Königf2ba57b2013-04-08 12:41:29 +02004930 case 124: /* UVD */
4931 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4932 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
Alex Deucherf122c612012-03-30 08:59:57 -04004933 break;
Christian Königae133a12012-09-18 15:30:44 -04004934 case 146:
4935 case 147:
Alex Deucher54e2e492013-06-13 18:26:25 -04004936 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
4937 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
Christian Königae133a12012-09-18 15:30:44 -04004938 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4939 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
Alex Deucher54e2e492013-06-13 18:26:25 -04004940 addr);
Christian Königae133a12012-09-18 15:30:44 -04004941 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
Alex Deucher54e2e492013-06-13 18:26:25 -04004942 status);
4943 cayman_vm_decode_fault(rdev, status, addr);
Christian Königae133a12012-09-18 15:30:44 -04004944 /* reset addr and status */
4945 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4946 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04004947 case 176: /* CP_INT in ring buffer */
4948 case 177: /* CP_INT in IB1 */
4949 case 178: /* CP_INT in IB2 */
4950 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
Alex Deucher74652802011-08-25 13:39:48 -04004951 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
Alex Deucher45f9a392010-03-24 13:55:51 -04004952 break;
4953 case 181: /* CP EOP event */
4954 DRM_DEBUG("IH: CP EOP\n");
Alex Deucher1b370782011-11-17 20:13:28 -05004955 if (rdev->family >= CHIP_CAYMAN) {
4956 switch (src_data) {
4957 case 0:
4958 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4959 break;
4960 case 1:
4961 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4962 break;
4963 case 2:
4964 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4965 break;
4966 }
4967 } else
4968 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
Alex Deucher45f9a392010-03-24 13:55:51 -04004969 break;
Alex Deucher233d1ad2012-12-04 15:25:59 -05004970 case 224: /* DMA trap event */
4971 DRM_DEBUG("IH: DMA trap\n");
4972 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4973 break;
Alex Deucherdc50ba72013-06-26 00:33:35 -04004974 case 230: /* thermal low to high */
4975 DRM_DEBUG("IH: thermal low to high\n");
4976 rdev->pm.dpm.thermal.high_to_low = false;
4977 queue_thermal = true;
4978 break;
4979 case 231: /* thermal high to low */
4980 DRM_DEBUG("IH: thermal high to low\n");
4981 rdev->pm.dpm.thermal.high_to_low = true;
4982 queue_thermal = true;
4983 break;
Alex Deucher2031f772010-04-22 12:52:11 -04004984 case 233: /* GUI IDLE */
Ilija Hadzic303c8052011-06-07 14:54:48 -04004985 DRM_DEBUG("IH: GUI idle\n");
Alex Deucher2031f772010-04-22 12:52:11 -04004986 break;
Alex Deucherf60cbd12012-12-04 15:27:33 -05004987 case 244: /* DMA trap event */
4988 if (rdev->family >= CHIP_CAYMAN) {
4989 DRM_DEBUG("IH: DMA1 trap\n");
4990 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4991 }
4992 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04004993 default:
4994 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4995 break;
4996 }
4997
4998 /* wptr/rptr are in bytes! */
4999 rptr += 16;
5000 rptr &= rdev->ih.ptr_mask;
5001 }
Alex Deucher45f9a392010-03-24 13:55:51 -04005002 if (queue_hotplug)
Tejun Heo32c87fc2011-01-03 14:49:32 +01005003 schedule_work(&rdev->hotplug_work);
Alex Deucherf122c612012-03-30 08:59:57 -04005004 if (queue_hdmi)
5005 schedule_work(&rdev->audio_work);
Alex Deucherdc50ba72013-06-26 00:33:35 -04005006 if (queue_thermal && rdev->pm.dpm_enabled)
5007 schedule_work(&rdev->pm.dpm.thermal.work);
Alex Deucher45f9a392010-03-24 13:55:51 -04005008 rdev->ih.rptr = rptr;
5009 WREG32(IH_RB_RPTR, rdev->ih.rptr);
Christian Koenigc20dc362012-05-16 21:45:24 +02005010 atomic_set(&rdev->ih.lock, 0);
5011
5012 /* make sure wptr hasn't changed while processing */
5013 wptr = evergreen_get_ih_wptr(rdev);
5014 if (wptr != rptr)
5015 goto restart_ih;
5016
Alex Deucher45f9a392010-03-24 13:55:51 -04005017 return IRQ_HANDLED;
5018}
5019
/* Bring the GPU up to a fully working state.
 * The ordering below is a hardware requirement: PCIe/ASPM setup,
 * MC programming, microcode load, GART/AGP enable, core init, RLC
 * and writeback buffers, fence rings, IRQ setup, ring buffers,
 * engine resume (CP, DMA, UVD), IB pool, and finally audio.
 * Returns 0 on success or a negative error code; callers treat any
 * failure as "acceleration not working".
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	evergreen_mc_program(rdev);

	/* DCE5 (NI) parts additionally need MC microcode */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	/* address translation: AGP aperture or PCIe GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers (IGP parts save/restore RLC state) */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: on failure the ring is simply disabled below */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_RB_RPTR, DMA_RB_WPTR,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD resume failed above; skip it */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5174
/* Resume the GPU after suspend: reset the ASIC, re-post the card via
 * the ATOM BIOS init table, reprogram the golden register settings and
 * run the full startup sequence.  Returns 0 on success or a negative
 * error code (acceleration is marked unusable on failure).
 */
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;

}
5205
/* Quiesce the GPU for suspend: tear down audio and UVD, stop the CP
 * and DMA engines, disable interrupts and writeback, and take down the
 * GART so the chip is idle before the system sleeps.  Always succeeds.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5219
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005220/* Plan is to move initialization in that function and use
5221 * helper function so that radeon_device_init pretty much
5222 * do nothing more than calling asic specific function. This
5223 * should also allow to remove a bunch of callback function
5224 * like vram_info.
5225 */
/* One-time driver initialization for evergreen-class GPUs: read and
 * validate the (ATOM) BIOS, reset and post the card, set up scratch
 * and surface registers, clocks, fences, AGP, memory controller,
 * buffer objects and ring bookkeeping, then attempt full startup.
 * Startup failure is non-fatal here (modesetting still works without
 * acceleration), but missing MC microcode on DCE5 discrete parts is.
 * Returns 0 on success or a negative error code.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure just disables AGP, not fatal */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD init may fail; the UVD ring is only set up if it succeeds */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* acceleration failure is non-fatal: unwind and continue
		 * without it */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5333
/* Full driver teardown, mirroring evergreen_init/startup in reverse:
 * stop engines and IRQs, release RLC/writeback/IB resources, take
 * down GART, UVD, scratch memory, GEM, fences, AGP, BO manager and
 * atombios state, and free the BIOS copy.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
Alex Deucher9e46a482011-01-06 18:49:35 -05005357
Ilija Hadzicb07759b2011-09-20 10:22:58 -04005358void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
Alex Deucher9e46a482011-01-06 18:49:35 -05005359{
Kleber Sacilotto de Souza7e0e4192013-05-03 19:43:13 -03005360 u32 link_width_cntl, speed_cntl;
Alex Deucher9e46a482011-01-06 18:49:35 -05005361
Alex Deucherd42dd572011-01-12 20:05:11 -05005362 if (radeon_pcie_gen2 == 0)
5363 return;
5364
Alex Deucher9e46a482011-01-06 18:49:35 -05005365 if (rdev->flags & RADEON_IS_IGP)
5366 return;
5367
5368 if (!(rdev->flags & RADEON_IS_PCIE))
5369 return;
5370
5371 /* x2 cards have a special sequence */
5372 if (ASIC_IS_X2(rdev))
5373 return;
5374
Kleber Sacilotto de Souza7e0e4192013-05-03 19:43:13 -03005375 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5376 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
Dave Airlie197bbb32012-06-27 08:35:54 +01005377 return;
5378
Alex Deucher492d2b62012-10-25 16:06:59 -04005379 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher3691fee2012-10-08 17:46:27 -04005380 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5381 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5382 return;
5383 }
5384
Dave Airlie197bbb32012-06-27 08:35:54 +01005385 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5386
Alex Deucher9e46a482011-01-06 18:49:35 -05005387 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5388 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5389
Alex Deucher492d2b62012-10-25 16:06:59 -04005390 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005391 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
Alex Deucher492d2b62012-10-25 16:06:59 -04005392 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005393
Alex Deucher492d2b62012-10-25 16:06:59 -04005394 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005395 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
Alex Deucher492d2b62012-10-25 16:06:59 -04005396 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005397
Alex Deucher492d2b62012-10-25 16:06:59 -04005398 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005399 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
Alex Deucher492d2b62012-10-25 16:06:59 -04005400 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005401
Alex Deucher492d2b62012-10-25 16:06:59 -04005402 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005403 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
Alex Deucher492d2b62012-10-25 16:06:59 -04005404 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005405
Alex Deucher492d2b62012-10-25 16:06:59 -04005406 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005407 speed_cntl |= LC_GEN2_EN_STRAP;
Alex Deucher492d2b62012-10-25 16:06:59 -04005408 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005409
5410 } else {
Alex Deucher492d2b62012-10-25 16:06:59 -04005411 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005412 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5413 if (1)
5414 link_width_cntl |= LC_UPCONFIGURE_DIS;
5415 else
5416 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
Alex Deucher492d2b62012-10-25 16:06:59 -04005417 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005418 }
5419}
Alex Deucherf52382d2013-02-15 11:02:50 -05005420
/**
 * evergreen_program_aspm - configure PCIE ASPM (active state power management)
 * @rdev: radeon_device pointer
 *
 * Programs the PIF PHY pairing, L0s/L1 inactivity timers, PLL
 * power-down behaviour in L1, dynamic lane power state and (on
 * BARTS and newer) PLL ramp-up/LS2 exit times.  Skipped entirely
 * when disabled via the radeon.aspm module parameter or on
 * non-PCIE boards.  Which states get enabled depends on the asic
 * family (see the switch below) and on whether the system is a
 * fusion platform.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* radeon.aspm=0 disables ASPM programming */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s off; the rest may enable it below */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* select single vs multi PIF pairing depending on platform type;
	 * only write back registers whose value actually changed */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* build the new LC_CNTL value; written once at the end */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		/* L0s inactivity timer; BARTS+ use a different setting */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		/* L1 inactivity timer */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* power down the PLLs in the L1 and TXS2 states on
			 * both PIF PHYs */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+: also program the PLL ramp-up times */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			/* dynamic lane power state */
			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS+: LS2 exit time on both PIF PHYs */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
5569}