/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/radeon_drm.h>
#include "radeon_fixed.h"
#include "radeon.h"
#include "atom.h"
#include "atom-bits.h"

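/*
 * Lock or unlock the CRTC's double-buffered update registers via the
 * AtomBIOS UpdateCRTC_DoubleBufferRegisters command table, so that the
 * timing registers are not latched mid-update during a mode set.
 */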
static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        int index =
            GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
        ENABLE_CRTC_PS_ALLOCATION args;

        memset(&args, 0, sizeof(args));

        args.ucCRTC = radeon_crtc->crtc_id;
        args.ucEnable = lock;

        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

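/* Turn the CRTC controller on or off via the AtomBIOS EnableCRTC command table. */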
static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
        ENABLE_CRTC_PS_ALLOCATION args;

        memset(&args, 0, sizeof(args));

        args.ucCRTC = radeon_crtc->crtc_id;
        args.ucEnable = state;

        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

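/*
 * Enable or disable memory requests for the CRTC (EnableCRTCMemReq table).
 * Only called for DCE3+ parts; see atombios_crtc_dpms() below.
 */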
static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
        ENABLE_CRTC_PS_ALLOCATION args;

        memset(&args, 0, sizeof(args));

        args.ucCRTC = radeon_crtc->crtc_id;
        args.ucEnable = state;

        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

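/* Blank or unblank the CRTC via the AtomBIOS BlankCRTC command table. */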
static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
        BLANK_CRTC_PS_ALLOCATION args;

        memset(&args, 0, sizeof(args));

        args.ucCRTC = radeon_crtc->crtc_id;
        args.ucBlanking = state;

        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

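/*
 * DPMS handling: powering up enables the CRTC before unblanking, powering
 * down blanks before disabling, and DCE3+ parts additionally gate the
 * CRTC's memory requests.  The palette (LUT) is reloaded for every state
 * except DPMS_OFF.
 */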
void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
{
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;

        switch (mode) {
        case DRM_MODE_DPMS_ON:
                if (ASIC_IS_DCE3(rdev))
                        atombios_enable_crtc_memreq(crtc, 1);
                atombios_enable_crtc(crtc, 1);
                atombios_blank_crtc(crtc, 0);
                break;
        case DRM_MODE_DPMS_STANDBY:
        case DRM_MODE_DPMS_SUSPEND:
        case DRM_MODE_DPMS_OFF:
                atombios_blank_crtc(crtc, 1);
                atombios_enable_crtc(crtc, 0);
                if (ASIC_IS_DCE3(rdev))
                        atombios_enable_crtc_memreq(crtc, 0);
                break;
        }

        if (mode != DRM_MODE_DPMS_OFF)
                radeon_crtc_load_lut(crtc);
}

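/*
 * Byte-swap a caller-filled SET_CRTC_USING_DTD_TIMING_PARAMETERS block to
 * little endian and execute the SetCRTC_UsingDTDTiming command table.
 */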
static void
atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
                             SET_CRTC_USING_DTD_TIMING_PARAMETERS *crtc_param)
{
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        SET_CRTC_USING_DTD_TIMING_PARAMETERS conv_param;
        int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);

        conv_param.usH_Size = cpu_to_le16(crtc_param->usH_Size);
        conv_param.usH_Blanking_Time =
            cpu_to_le16(crtc_param->usH_Blanking_Time);
        conv_param.usV_Size = cpu_to_le16(crtc_param->usV_Size);
        conv_param.usV_Blanking_Time =
            cpu_to_le16(crtc_param->usV_Blanking_Time);
        conv_param.usH_SyncOffset = cpu_to_le16(crtc_param->usH_SyncOffset);
        conv_param.usH_SyncWidth = cpu_to_le16(crtc_param->usH_SyncWidth);
        conv_param.usV_SyncOffset = cpu_to_le16(crtc_param->usV_SyncOffset);
        conv_param.usV_SyncWidth = cpu_to_le16(crtc_param->usV_SyncWidth);
        conv_param.susModeMiscInfo.usAccess =
            cpu_to_le16(crtc_param->susModeMiscInfo.usAccess);
        conv_param.ucCRTC = crtc_param->ucCRTC;

        DRM_DEBUG("executing set crtc dtd timing\n");
        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&conv_param);
}

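/*
 * Likewise for the standard CRTC timing parameters: convert the
 * caller-filled structure to little endian and execute the SetCRTC_Timing
 * command table.
 */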
void atombios_crtc_set_timing(struct drm_crtc *crtc,
                              SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION *crtc_param)
{
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION conv_param;
        int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);

        conv_param.usH_Total = cpu_to_le16(crtc_param->usH_Total);
        conv_param.usH_Disp = cpu_to_le16(crtc_param->usH_Disp);
        conv_param.usH_SyncStart = cpu_to_le16(crtc_param->usH_SyncStart);
        conv_param.usH_SyncWidth = cpu_to_le16(crtc_param->usH_SyncWidth);
        conv_param.usV_Total = cpu_to_le16(crtc_param->usV_Total);
        conv_param.usV_Disp = cpu_to_le16(crtc_param->usV_Disp);
        conv_param.usV_SyncStart = cpu_to_le16(crtc_param->usV_SyncStart);
        conv_param.usV_SyncWidth = cpu_to_le16(crtc_param->usV_SyncWidth);
        conv_param.susModeMiscInfo.usAccess =
            cpu_to_le16(crtc_param->susModeMiscInfo.usAccess);
        conv_param.ucCRTC = crtc_param->ucCRTC;
        conv_param.ucOverscanRight = crtc_param->ucOverscanRight;
        conv_param.ucOverscanLeft = crtc_param->ucOverscanLeft;
        conv_param.ucOverscanBottom = crtc_param->ucOverscanBottom;
        conv_param.ucOverscanTop = crtc_param->ucOverscanTop;
        conv_param.ucReserved = crtc_param->ucReserved;

        DRM_DEBUG("executing set crtc timing\n");
        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&conv_param);
}

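/*
 * Program the pixel clock PLL for this CRTC.  The divider values come from
 * radeon_compute_pll(); the PLL flags bias that search depending on the ASIC
 * family and the encoder driving the CRTC.  The SetPixelClock parameter
 * layout depends on the table revision reported by the BIOS, so the command
 * header is parsed first and the matching PIXEL_CLOCK_PARAMETERS structure
 * is filled in.
 */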
void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        struct drm_encoder *encoder = NULL;
        struct radeon_encoder *radeon_encoder = NULL;
        uint8_t frev, crev;
        int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
        SET_PIXEL_CLOCK_PS_ALLOCATION args;
        PIXEL_CLOCK_PARAMETERS *spc1_ptr;
        PIXEL_CLOCK_PARAMETERS_V2 *spc2_ptr;
        PIXEL_CLOCK_PARAMETERS_V3 *spc3_ptr;
        uint32_t sclock = mode->clock;
        uint32_t ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
        struct radeon_pll *pll;
        int pll_flags = 0;

        memset(&args, 0, sizeof(args));

        if (ASIC_IS_AVIVO(rdev)) {
                uint32_t ss_cntl;

                if ((rdev->family == CHIP_RS600) ||
                    (rdev->family == CHIP_RS690) ||
                    (rdev->family == CHIP_RS740))
                        pll_flags |= (RADEON_PLL_USE_FRAC_FB_DIV |
                                      RADEON_PLL_PREFER_CLOSEST_LOWER);

                if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */
                        pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
                else
                        pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;

                /* disable spread spectrum clocking for now -- thanks Hedy Lamarr */
                if (radeon_crtc->crtc_id == 0) {
                        ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
                        WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl & ~1);
                } else {
                        ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
                        WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl & ~1);
                }
        } else {
                pll_flags |= RADEON_PLL_LEGACY;

                if (mode->clock > 200000) /* range limits??? */
                        pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
                else
                        pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
        }

        list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
                if (encoder->crtc == crtc) {
                        if (!ASIC_IS_AVIVO(rdev)) {
                                if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
                                        pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
                                if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
                                        pll_flags |= RADEON_PLL_USE_REF_DIV;
                        }
                        radeon_encoder = to_radeon_encoder(encoder);
                        /* stop here so 'encoder' keeps pointing at the match */
                        break;
                }
        }

        if (radeon_crtc->crtc_id == 0)
                pll = &rdev->clock.p1pll;
        else
                pll = &rdev->clock.p2pll;

        radeon_compute_pll(pll, mode->clock, &sclock, &fb_div, &frac_fb_div,
                           &ref_div, &post_div, pll_flags);

        atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
                              &crev);

        switch (frev) {
        case 1:
                switch (crev) {
                case 1:
                        spc1_ptr = (PIXEL_CLOCK_PARAMETERS *)&args.sPCLKInput;
                        spc1_ptr->usPixelClock = cpu_to_le16(sclock);
                        spc1_ptr->usRefDiv = cpu_to_le16(ref_div);
                        spc1_ptr->usFbDiv = cpu_to_le16(fb_div);
                        spc1_ptr->ucFracFbDiv = frac_fb_div;
                        spc1_ptr->ucPostDiv = post_div;
                        spc1_ptr->ucPpll =
                            radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
                        spc1_ptr->ucCRTC = radeon_crtc->crtc_id;
                        spc1_ptr->ucRefDivSrc = 1;
                        break;
                case 2:
                        spc2_ptr =
                            (PIXEL_CLOCK_PARAMETERS_V2 *)&args.sPCLKInput;
                        spc2_ptr->usPixelClock = cpu_to_le16(sclock);
                        spc2_ptr->usRefDiv = cpu_to_le16(ref_div);
                        spc2_ptr->usFbDiv = cpu_to_le16(fb_div);
                        spc2_ptr->ucFracFbDiv = frac_fb_div;
                        spc2_ptr->ucPostDiv = post_div;
                        spc2_ptr->ucPpll =
                            radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
                        spc2_ptr->ucCRTC = radeon_crtc->crtc_id;
                        spc2_ptr->ucRefDivSrc = 1;
                        break;
                case 3:
                        /* v3 needs the encoder driving this CRTC */
                        if (!radeon_encoder)
                                return;
                        spc3_ptr =
                            (PIXEL_CLOCK_PARAMETERS_V3 *)&args.sPCLKInput;
                        spc3_ptr->usPixelClock = cpu_to_le16(sclock);
                        spc3_ptr->usRefDiv = cpu_to_le16(ref_div);
                        spc3_ptr->usFbDiv = cpu_to_le16(fb_div);
                        spc3_ptr->ucFracFbDiv = frac_fb_div;
                        spc3_ptr->ucPostDiv = post_div;
                        spc3_ptr->ucPpll =
                            radeon_crtc->crtc_id ? ATOM_PPLL2 : ATOM_PPLL1;
                        spc3_ptr->ucMiscInfo = (radeon_crtc->crtc_id << 2);
                        spc3_ptr->ucTransmitterId = radeon_encoder->encoder_id;
                        spc3_ptr->ucEncoderMode =
                            atombios_get_encoder_mode(encoder);
                        break;
                default:
                        DRM_ERROR("Unknown table version %d %d\n", frev, crev);
                        return;
                }
                break;
        default:
                DRM_ERROR("Unknown table version %d %d\n", frev, crev);
                return;
        }

        DRM_DEBUG("executing set pll\n");
        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}

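/*
 * Pin the framebuffer object into VRAM and point the AVIVO graphics surface
 * registers (D1GRPH/D2GRPH, selected via crtc_offset) at it, then program
 * pitch, viewport and scan-out format.  The previously pinned framebuffer,
 * if any, is unpinned.
 */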
int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
                           struct drm_framebuffer *old_fb)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        struct radeon_framebuffer *radeon_fb;
        struct drm_gem_object *obj;
        struct drm_radeon_gem_object *obj_priv;
        uint64_t fb_location;
        uint32_t fb_format, fb_pitch_pixels;

        if (!crtc->fb)
                return -EINVAL;

        radeon_fb = to_radeon_framebuffer(crtc->fb);

        obj = radeon_fb->obj;
        obj_priv = obj->driver_private;

        if (radeon_gem_object_pin(obj, RADEON_GEM_DOMAIN_VRAM, &fb_location))
                return -EINVAL;

        switch (crtc->fb->bits_per_pixel) {
        case 15:
                fb_format =
                    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
                    AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
                break;
        case 16:
                fb_format =
                    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
                    AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
                break;
        case 24:
        case 32:
                fb_format =
                    AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
                    AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
                break;
        default:
                DRM_ERROR("Unsupported screen depth %d\n",
                          crtc->fb->bits_per_pixel);
                return -EINVAL;
        }

        /* TODO tiling */
        if (radeon_crtc->crtc_id == 0)
                WREG32(AVIVO_D1VGA_CONTROL, 0);
        else
                WREG32(AVIVO_D2VGA_CONTROL, 0);
        WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
               (u32) fb_location);
        WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
               radeon_crtc->crtc_offset, (u32) fb_location);
        WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);

        WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
        WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
        WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
        WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
        WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, crtc->fb->width);
        WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, crtc->fb->height);

        fb_pitch_pixels = crtc->fb->pitch / (crtc->fb->bits_per_pixel / 8);
        WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
        WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);

        WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
               crtc->mode.vdisplay);
        x &= ~3;
        y &= ~1;
        WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
               (x << 16) | y);
        WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
               (crtc->mode.hdisplay << 16) | crtc->mode.vdisplay);

        if (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE)
                WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset,
                       AVIVO_D1MODE_INTERLEAVE_EN);
        else
                WREG32(AVIVO_D1MODE_DATA_FORMAT + radeon_crtc->crtc_offset, 0);

        if (old_fb && old_fb != crtc->fb) {
                radeon_fb = to_radeon_framebuffer(old_fb);
                radeon_gem_object_unpin(radeon_fb->obj);
        }
        return 0;
}

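/*
 * Full mode set for a CRTC: program the pixel clock PLL and the CRTC timing
 * from the adjusted mode, then set the scan-out base.  On pre-AVIVO (R4xx)
 * ASICs, CRTC0 additionally programs the FP shadow registers through the
 * DTD timing table, and the legacy base/surface helpers are used instead.
 */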
int atombios_crtc_mode_set(struct drm_crtc *crtc,
                           struct drm_display_mode *mode,
                           struct drm_display_mode *adjusted_mode,
                           int x, int y, struct drm_framebuffer *old_fb)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        struct drm_encoder *encoder;
        SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION crtc_timing;

        /* TODO color tiling */
        memset(&crtc_timing, 0, sizeof(crtc_timing));

        /* TODO tv */
        list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {

        }

        crtc_timing.ucCRTC = radeon_crtc->crtc_id;
        crtc_timing.usH_Total = adjusted_mode->crtc_htotal;
        crtc_timing.usH_Disp = adjusted_mode->crtc_hdisplay;
        crtc_timing.usH_SyncStart = adjusted_mode->crtc_hsync_start;
        crtc_timing.usH_SyncWidth =
            adjusted_mode->crtc_hsync_end - adjusted_mode->crtc_hsync_start;

        crtc_timing.usV_Total = adjusted_mode->crtc_vtotal;
        crtc_timing.usV_Disp = adjusted_mode->crtc_vdisplay;
        crtc_timing.usV_SyncStart = adjusted_mode->crtc_vsync_start;
        crtc_timing.usV_SyncWidth =
            adjusted_mode->crtc_vsync_end - adjusted_mode->crtc_vsync_start;

        if (adjusted_mode->flags & DRM_MODE_FLAG_NVSYNC)
                crtc_timing.susModeMiscInfo.usAccess |= ATOM_VSYNC_POLARITY;

        if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC)
                crtc_timing.susModeMiscInfo.usAccess |= ATOM_HSYNC_POLARITY;

        if (adjusted_mode->flags & DRM_MODE_FLAG_CSYNC)
                crtc_timing.susModeMiscInfo.usAccess |= ATOM_COMPOSITESYNC;

        if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
                crtc_timing.susModeMiscInfo.usAccess |= ATOM_INTERLACE;

        if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
                crtc_timing.susModeMiscInfo.usAccess |= ATOM_DOUBLE_CLOCK_MODE;

        atombios_crtc_set_pll(crtc, adjusted_mode);
        atombios_crtc_set_timing(crtc, &crtc_timing);

        if (ASIC_IS_AVIVO(rdev)) {
                atombios_crtc_set_base(crtc, x, y, old_fb);
        } else {
                if (radeon_crtc->crtc_id == 0) {
                        SET_CRTC_USING_DTD_TIMING_PARAMETERS crtc_dtd_timing;
                        memset(&crtc_dtd_timing, 0, sizeof(crtc_dtd_timing));

                        /* setup FP shadow regs on R4xx */
                        crtc_dtd_timing.ucCRTC = radeon_crtc->crtc_id;
                        crtc_dtd_timing.usH_Size = adjusted_mode->crtc_hdisplay;
                        crtc_dtd_timing.usV_Size = adjusted_mode->crtc_vdisplay;
                        crtc_dtd_timing.usH_Blanking_Time =
                            adjusted_mode->crtc_hblank_end -
                            adjusted_mode->crtc_hdisplay;
                        crtc_dtd_timing.usV_Blanking_Time =
                            adjusted_mode->crtc_vblank_end -
                            adjusted_mode->crtc_vdisplay;
                        crtc_dtd_timing.usH_SyncOffset =
                            adjusted_mode->crtc_hsync_start -
                            adjusted_mode->crtc_hdisplay;
                        crtc_dtd_timing.usV_SyncOffset =
                            adjusted_mode->crtc_vsync_start -
                            adjusted_mode->crtc_vdisplay;
                        crtc_dtd_timing.usH_SyncWidth =
                            adjusted_mode->crtc_hsync_end -
                            adjusted_mode->crtc_hsync_start;
                        crtc_dtd_timing.usV_SyncWidth =
                            adjusted_mode->crtc_vsync_end -
                            adjusted_mode->crtc_vsync_start;
                        /* crtc_dtd_timing.ucH_Border = adjusted_mode->crtc_hborder; */
                        /* crtc_dtd_timing.ucV_Border = adjusted_mode->crtc_vborder; */

                        if (adjusted_mode->flags & DRM_MODE_FLAG_NVSYNC)
                                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                                    ATOM_VSYNC_POLARITY;

                        if (adjusted_mode->flags & DRM_MODE_FLAG_NHSYNC)
                                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                                    ATOM_HSYNC_POLARITY;

                        if (adjusted_mode->flags & DRM_MODE_FLAG_CSYNC)
                                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                                    ATOM_COMPOSITESYNC;

                        if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
                                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                                    ATOM_INTERLACE;

                        if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
                                crtc_dtd_timing.susModeMiscInfo.usAccess |=
                                    ATOM_DOUBLE_CLOCK_MODE;

                        atombios_set_crtc_dtd_timing(crtc, &crtc_dtd_timing);
                }
                radeon_crtc_set_base(crtc, x, y, old_fb);
                radeon_legacy_atom_set_surface(crtc);
        }
        return 0;
}

static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
                                     struct drm_display_mode *mode,
                                     struct drm_display_mode *adjusted_mode)
{
        return true;
}

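/*
 * The CRTC helper framework calls prepare before and commit after a mode
 * set: prepare shuts the CRTC down and locks its double-buffered registers,
 * commit unlocks them and powers the CRTC back up.
 */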
static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
        atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
        atombios_lock_crtc(crtc, 1);
}

static void atombios_crtc_commit(struct drm_crtc *crtc)
{
        atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
        atombios_lock_crtc(crtc, 0);
}

static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
        .dpms = atombios_crtc_dpms,
        .mode_fixup = atombios_crtc_mode_fixup,
        .mode_set = atombios_crtc_mode_set,
        .mode_set_base = atombios_crtc_set_base,
        .prepare = atombios_crtc_prepare,
        .commit = atombios_crtc_commit,
};

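/*
 * Attach the AtomBIOS CRTC helpers.  CRTC 1 (D2) uses the same register
 * layout as CRTC 0 (D1) at a fixed offset, which is cached in crtc_offset
 * and added to the register writes in atombios_crtc_set_base().
 */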
void radeon_atombios_init_crtc(struct drm_device *dev,
                               struct radeon_crtc *radeon_crtc)
{
        if (radeon_crtc->crtc_id == 1)
                radeon_crtc->crtc_offset =
                    AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
        drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}

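/*
 * AVIVO display bandwidth setup: check that the available memory bandwidth
 * (derived from the memory clock and bus width, derated by a fixed
 * memory-efficiency factor) covers the peak bandwidth of the active modes,
 * bump the display controllers' latency timers in the memory controller
 * when disp_priority is set high, and split the shared line buffer between
 * the two display controllers.
 */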
void radeon_init_disp_bw_avivo(struct drm_device *dev,
                               struct drm_display_mode *mode1,
                               uint32_t pixel_bytes1,
                               struct drm_display_mode *mode2,
                               uint32_t pixel_bytes2)
{
        struct radeon_device *rdev = dev->dev_private;
        fixed20_12 min_mem_eff;
        fixed20_12 peak_disp_bw, mem_bw, pix_clk, pix_clk2, temp_ff;
        fixed20_12 sclk_ff, mclk_ff;
        uint32_t dc_lb_memory_split, temp;

        min_mem_eff.full = rfixed_const_8(0);
        if (rdev->disp_priority == 2) {
                uint32_t mc_init_misc_lat_timer = 0;
                if (rdev->family == CHIP_RV515)
                        mc_init_misc_lat_timer =
                            RREG32_MC(RV515_MC_INIT_MISC_LAT_TIMER);
                else if (rdev->family == CHIP_RS690)
                        mc_init_misc_lat_timer =
                            RREG32_MC(RS690_MC_INIT_MISC_LAT_TIMER);

                mc_init_misc_lat_timer &=
                    ~(R300_MC_DISP1R_INIT_LAT_MASK <<
                      R300_MC_DISP1R_INIT_LAT_SHIFT);
                mc_init_misc_lat_timer &=
                    ~(R300_MC_DISP0R_INIT_LAT_MASK <<
                      R300_MC_DISP0R_INIT_LAT_SHIFT);

                if (mode2)
                        mc_init_misc_lat_timer |=
                            (1 << R300_MC_DISP1R_INIT_LAT_SHIFT);
                if (mode1)
                        mc_init_misc_lat_timer |=
                            (1 << R300_MC_DISP0R_INIT_LAT_SHIFT);

                if (rdev->family == CHIP_RV515)
                        WREG32_MC(RV515_MC_INIT_MISC_LAT_TIMER,
                                  mc_init_misc_lat_timer);
                else if (rdev->family == CHIP_RS690)
                        WREG32_MC(RS690_MC_INIT_MISC_LAT_TIMER,
                                  mc_init_misc_lat_timer);
        }

        /*
         * determine if there is enough bw for the current mode(s)
         */
        temp_ff.full = rfixed_const(100);
        mclk_ff.full = rfixed_const(rdev->clock.default_mclk);
        mclk_ff.full = rfixed_div(mclk_ff, temp_ff);
        sclk_ff.full = rfixed_const(rdev->clock.default_sclk);
        sclk_ff.full = rfixed_div(sclk_ff, temp_ff);

        temp = (rdev->mc.vram_width / 8) * (rdev->mc.vram_is_ddr ? 2 : 1);
        temp_ff.full = rfixed_const(temp);
        mem_bw.full = rfixed_mul(mclk_ff, temp_ff);
        mem_bw.full = rfixed_mul(mem_bw, min_mem_eff);

        pix_clk.full = 0;
        pix_clk2.full = 0;
        peak_disp_bw.full = 0;
        if (mode1) {
                temp_ff.full = rfixed_const(1000);
                pix_clk.full = rfixed_const(mode1->clock); /* convert to fixed point */
                pix_clk.full = rfixed_div(pix_clk, temp_ff);
                temp_ff.full = rfixed_const(pixel_bytes1);
                peak_disp_bw.full += rfixed_mul(pix_clk, temp_ff);
        }
        if (mode2) {
                temp_ff.full = rfixed_const(1000);
                pix_clk2.full = rfixed_const(mode2->clock); /* convert to fixed point */
                pix_clk2.full = rfixed_div(pix_clk2, temp_ff);
                temp_ff.full = rfixed_const(pixel_bytes2);
                peak_disp_bw.full += rfixed_mul(pix_clk2, temp_ff);
        }

        if (peak_disp_bw.full >= mem_bw.full) {
                DRM_ERROR
                    ("You may not have enough display bandwidth for the current mode\n"
                     "If you have flickering problems, try to lower the resolution, refresh rate, or color depth\n");
                DRM_DEBUG("peak disp bw %d, mem_bw %d\n",
                          rfixed_trunc(peak_disp_bw), rfixed_trunc(mem_bw));
        }

        /*
         * Line Buffer Setup
         * There is a single line buffer shared by both display controllers.
         * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
         * the display controllers.  The partitioning can either be done
         * manually or via one of four preset allocations specified in bits 1:0:
         *  0 - line buffer is divided in half and shared between each display controller
         *  1 - D1 gets 3/4 of the line buffer, D2 gets 1/4
         *  2 - D1 gets the whole buffer
         *  3 - D1 gets 1/4 of the line buffer, D2 gets 3/4
         * Setting bit 2 of DC_LB_MEMORY_SPLIT switches to manual allocation mode.
         * In manual allocation mode, D1 always starts at 0, D1 end/2 is specified
         * in bits 14:4; the D2 allocation follows D1.
         */

        /* is auto or manual better? */
        dc_lb_memory_split =
            RREG32(AVIVO_DC_LB_MEMORY_SPLIT) & ~AVIVO_DC_LB_MEMORY_SPLIT_MASK;
        dc_lb_memory_split &= ~AVIVO_DC_LB_MEMORY_SPLIT_SHIFT_MODE;
#if 1
        /* auto */
        if (mode1 && mode2) {
                if (mode1->hdisplay > mode2->hdisplay) {
                        if (mode1->hdisplay > 2560)
                                dc_lb_memory_split |=
                                    AVIVO_DC_LB_MEMORY_SPLIT_D1_3Q_D2_1Q;
                        else
                                dc_lb_memory_split |=
                                    AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
                } else if (mode2->hdisplay > mode1->hdisplay) {
                        if (mode2->hdisplay > 2560)
                                dc_lb_memory_split |=
                                    AVIVO_DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q;
                        else
                                dc_lb_memory_split |=
                                    AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
                } else {
                        dc_lb_memory_split |=
                            AVIVO_DC_LB_MEMORY_SPLIT_D1HALF_D2HALF;
                }
        } else if (mode1) {
                dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_D1_ONLY;
        } else if (mode2) {
                dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_D1_1Q_D2_3Q;
        }
#else
        /* manual */
        dc_lb_memory_split |= AVIVO_DC_LB_MEMORY_SPLIT_SHIFT_MODE;
        dc_lb_memory_split &=
            ~(AVIVO_DC_LB_DISP1_END_ADR_MASK <<
              AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
        if (mode1) {
                dc_lb_memory_split |=
                    ((((mode1->hdisplay / 2) + 64) & AVIVO_DC_LB_DISP1_END_ADR_MASK)
                     << AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
        } else if (mode2) {
                dc_lb_memory_split |= (0 << AVIVO_DC_LB_DISP1_END_ADR_SHIFT);
        }
#endif
        WREG32(AVIVO_DC_LB_MEMORY_SPLIT, dc_lb_memory_split);
}