/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */
26
27#include "drmP.h"
28#include "drm_mode.h"
29#include "drm_crtc_helper.h"
30
31#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
32#include "nouveau_reg.h"
33#include "nouveau_drv.h"
34#include "nouveau_hw.h"
35#include "nouveau_encoder.h"
36#include "nouveau_crtc.h"
37#include "nouveau_fb.h"
38#include "nouveau_connector.h"
39#include "nv50_display.h"
40
/* Upload the software gamma ramp (nv_crtc->lut.{r,g,b}) into the CRTC's
 * LUT buffer object via its kernel mapping.
 *
 * Each LUT entry occupies 8 bytes: 16-bit red/green/blue words at byte
 * offsets 0/2/4 (the >> 2 drops the two low bits of each 16-bit
 * component before it is written).
 */
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	NV_DEBUG_KMS(crtc->dev, "\n");

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}

	/* NOTE(review): for 30-bit depth a 257th entry (i == 256 after the
	 * loop) is written, duplicating the last ramp value — presumably
	 * the hardware LUT has one extra entry in this mode; confirm
	 * against hardware documentation.
	 */
	if (nv_crtc->lut.depth == 30) {
		writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
	}
}
62
/* Blank or unblank a CRTC through the EVO display channel.
 *
 * Blanking detaches the CLUT and framebuffer DMA objects from the head;
 * unblanking re-attaches them.  The methods are only queued here — the
 * caller pushes NV50_EVO_UPDATE when the change should take effect
 * (see nv50_crtc_commit()).
 *
 * Returns 0 on success, or a negative error code if pushbuffer space
 * could not be reserved.
 */
int
nv50_crtc_blank(struct nouveau_crtc *nv_crtc, bool blanked)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	int index = nv_crtc->index, ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
	NV_DEBUG_KMS(dev, "%s\n", blanked ? "blanked" : "unblanked");

	if (blanked) {
		nv_crtc->cursor.hide(nv_crtc, false);

		/* NV84+ takes an extra CLUT_DMA method, hence 7 vs 5 words */
		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 7 : 5);
		if (ret) {
			NV_ERROR(dev, "no space while blanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, NV50_EVO_CRTC_CLUT_MODE_BLANK);
		OUT_RING(evo, 0);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NV84_EVO_CRTC_CLUT_DMA_HANDLE_NONE);
		}

		/* detach the framebuffer DMA object */
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
	} else {
		/* restore whichever cursor state was in effect before */
		if (nv_crtc->cursor.visible)
			nv_crtc->cursor.show(nv_crtc, false);
		else
			nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 10 : 8);
		if (ret) {
			NV_ERROR(dev, "no space while unblanking crtc\n");
			return ret;
		}
		/* the CLUT is switched off for 8bpp, on for deeper formats;
		 * the second word is the LUT bo's VRAM address >> 8
		 */
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, nv_crtc->lut.depth == 8 ?
			      NV50_EVO_CRTC_CLUT_MODE_OFF :
			      NV50_EVO_CRTC_CLUT_MODE_ON);
		OUT_RING(evo, (nv_crtc->lut.nvbo->bo.mem.mm_node->start <<
			       PAGE_SHIFT) >> 8);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NvEvoVRAM);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_OFFSET), 2);
		OUT_RING(evo, nv_crtc->fb.offset >> 8);
		OUT_RING(evo, 0);
		/* re-attach the FB_DMA object matching the fb's tile_flags
		 * (NV84+ only distinguishes the tiled formats)
		 */
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		if (dev_priv->chipset != 0x50)
			if (nv_crtc->fb.tile_flags == 0x7a00)
				OUT_RING(evo, NvEvoFB32);
			else
			if (nv_crtc->fb.tile_flags == 0x7000)
				OUT_RING(evo, NvEvoFB16);
			else
				OUT_RING(evo, NvEvoVRAM);
		else
			OUT_RING(evo, NvEvoVRAM);
	}

	nv_crtc->fb.blanked = blanked;
	return 0;
}
133
134static int
135nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
136{
137 struct drm_device *dev = nv_crtc->base.dev;
138 struct drm_nouveau_private *dev_priv = dev->dev_private;
139 struct nouveau_channel *evo = dev_priv->evo;
140 int ret;
141
Maarten Maathuisef2bb502009-12-13 16:53:12 +0100142 NV_DEBUG_KMS(dev, "\n");
Ben Skeggs6ee73862009-12-11 19:24:15 +1000143
144 ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
145 if (ret) {
146 NV_ERROR(dev, "no space while setting dither\n");
147 return ret;
148 }
149
150 BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, DITHER_CTRL), 1);
151 if (on)
152 OUT_RING(evo, NV50_EVO_CRTC_DITHER_CTRL_ON);
153 else
154 OUT_RING(evo, NV50_EVO_CRTC_DITHER_CTRL_OFF);
155
156 if (update) {
157 BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
158 OUT_RING(evo, 0);
159 FIRE_RING(evo);
160 }
161
162 return 0;
163}
164
165struct nouveau_connector *
166nouveau_crtc_connector_get(struct nouveau_crtc *nv_crtc)
167{
168 struct drm_device *dev = nv_crtc->base.dev;
169 struct drm_connector *connector;
170 struct drm_crtc *crtc = to_drm_crtc(nv_crtc);
171
172 /* The safest approach is to find an encoder with the right crtc, that
173 * is also linked to a connector. */
174 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
175 if (connector->encoder)
176 if (connector->encoder->crtc == crtc)
177 return nouveau_connector(connector);
178 }
179
180 return NULL;
181}
182
/* Program the CRTC scaler for the requested scaling mode.
 *
 * outX/outY is the on-screen size the source mode is scaled to.  For
 * aspect scaling, horiz/vert are scale ratios in 13.19 fixed point and
 * the smaller of the two is applied to both axes so the aspect ratio is
 * preserved.  If no native mode is known, panel scaling (SCALE_NONE)
 * is forced.
 *
 * When @update is set the change is flushed with NV50_EVO_UPDATE.
 * Returns 0 on success or a negative error code on ring-space failure.
 */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, int scaling_mode, bool update)
{
	struct nouveau_connector *nv_connector =
		nouveau_crtc_connector_get(nv_crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	struct drm_display_mode *native_mode = NULL;
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	uint32_t outX, outY, horiz, vert;
	int ret;

	NV_DEBUG_KMS(dev, "\n");

	/* every mode other than SCALE_NONE needs a native mode to scale to */
	switch (scaling_mode) {
	case DRM_MODE_SCALE_NONE:
		break;
	default:
		if (!nv_connector || !nv_connector->native_mode) {
			NV_ERROR(dev, "No native mode, forcing panel scaling\n");
			scaling_mode = DRM_MODE_SCALE_NONE;
		} else {
			native_mode = nv_connector->native_mode;
		}
		break;
	}

	switch (scaling_mode) {
	case DRM_MODE_SCALE_ASPECT:
		/* 13.19 fixed-point ratios; use the smaller so both axes fit */
		horiz = (native_mode->hdisplay << 19) / mode->hdisplay;
		vert = (native_mode->vdisplay << 19) / mode->vdisplay;

		if (vert > horiz) {
			outX = (mode->hdisplay * horiz) >> 19;
			outY = (mode->vdisplay * horiz) >> 19;
		} else {
			outX = (mode->hdisplay * vert) >> 19;
			outY = (mode->vdisplay * vert) >> 19;
		}
		break;
	case DRM_MODE_SCALE_FULLSCREEN:
		outX = native_mode->hdisplay;
		outY = native_mode->vdisplay;
		break;
	case DRM_MODE_SCALE_CENTER:
	case DRM_MODE_SCALE_NONE:
	default:
		outX = mode->hdisplay;
		outY = mode->vdisplay;
		break;
	}

	ret = RING_SPACE(evo, update ? 7 : 5);
	if (ret)
		return ret;

	/* Got a better name for SCALER_ACTIVE? */
	/* One day i've got to really figure out why this is needed. */
	/* the scaler is engaged for interlaced/doublescan modes and whenever
	 * the output size differs from the mode's resolution
	 */
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CTRL), 1);
	if ((mode->flags & DRM_MODE_FLAG_DBLSCAN) ||
	    (mode->flags & DRM_MODE_FLAG_INTERLACE) ||
	    mode->hdisplay != outX || mode->vdisplay != outY) {
		OUT_RING(evo, NV50_EVO_CRTC_SCALE_CTRL_ACTIVE);
	} else {
		OUT_RING(evo, NV50_EVO_CRTC_SCALE_CTRL_INACTIVE);
	}

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_RES1), 2);
	OUT_RING(evo, outY << 16 | outX);
	OUT_RING(evo, outY << 16 | outX);

	if (update) {
		BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING(evo, 0);
		FIRE_RING(evo);
	}

	return 0;
}
263
/* Program the video PLL (VPLL) for @head to pixel clock @pclk.
 *
 * Three register layouts are handled:
 *  - PLLs with a second VCO (pll.vco2.maxfreq != 0): two-stage N/M
 *    coefficients computed by nv50_calc_pll();
 *  - single-stage PLLs on pre-NV_C0 chipsets (nv50_calc_pll2());
 *  - the NV_C0-style register layout otherwise.
 *
 * NOTE(review): when nv50_calc_pll*() finds no usable coefficients
 * (ret <= 0) the PLL is left untouched and 0 is returned, so callers
 * never learn the clock was not actually set — confirm this is
 * intentional.
 */
int
nv50_crtc_set_clock(struct drm_device *dev, int head, int pclk)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct pll_lims pll;
	uint32_t reg1, reg2;
	int ret, N1, M1, N2, M2, P;

	ret = get_pll_limits(dev, PLL_VPLL0 + head, &pll);
	if (ret)
		return ret;

	if (pll.vco2.maxfreq) {
		/* two-stage PLL: preserve the unrelated bits of both
		 * coefficient registers and merge in the new N/M/P values
		 */
		ret = nv50_calc_pll(dev, &pll, pclk, &N1, &M1, &N2, &M2, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d NM1 %d %d NM2 %d %d P %d\n",
			 pclk, ret, N1, M1, N2, M2, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xff00ff00;
		reg2 = nv_rd32(dev, pll.reg + 8) & 0x8000ff00;
		nv_wr32(dev, pll.reg + 0, 0x10000611);
		nv_wr32(dev, pll.reg + 4, reg1 | (M1 << 16) | N1);
		nv_wr32(dev, pll.reg + 8, reg2 | (P << 28) | (M2 << 16) | N2);
	} else
	if (dev_priv->chipset < NV_C0) {
		/* single-stage PLL, pre-Fermi layout */
		ret = nv50_calc_pll2(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xffc00000;
		nv_wr32(dev, pll.reg + 0, 0x50000610);
		nv_wr32(dev, pll.reg + 4, reg1 | (P << 16) | (M1 << 8) | N1);
		nv_wr32(dev, pll.reg + 8, N2);
	} else {
		/* NV_C0+ register layout */
		ret = nv50_calc_pll2(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		nv_mask(dev, pll.reg + 0x0c, 0x00000000, 0x00000100);
		nv_wr32(dev, pll.reg + 0x04, (P << 16) | (N1 << 8) | M1);
		nv_wr32(dev, pll.reg + 0x10, N2 << 16);
	}

	return 0;
}
317
318static void
319nv50_crtc_destroy(struct drm_crtc *crtc)
320{
Marcin Slusarzdd19e442010-01-30 15:41:00 +0100321 struct drm_device *dev;
322 struct nouveau_crtc *nv_crtc;
Ben Skeggs6ee73862009-12-11 19:24:15 +1000323
324 if (!crtc)
325 return;
326
Marcin Slusarzdd19e442010-01-30 15:41:00 +0100327 dev = crtc->dev;
328 nv_crtc = nouveau_crtc(crtc);
329
330 NV_DEBUG_KMS(dev, "\n");
331
Ben Skeggs6ee73862009-12-11 19:24:15 +1000332 drm_crtc_cleanup(&nv_crtc->base);
333
334 nv50_cursor_fini(nv_crtc);
335
Ben Skeggs9d59e8a2010-08-27 13:04:41 +1000336 nouveau_bo_unmap(nv_crtc->lut.nvbo);
Ben Skeggs6ee73862009-12-11 19:24:15 +1000337 nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
Ben Skeggs9d59e8a2010-08-27 13:04:41 +1000338 nouveau_bo_unmap(nv_crtc->cursor.nvbo);
Ben Skeggs6ee73862009-12-11 19:24:15 +1000339 nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
340 kfree(nv_crtc->mode);
341 kfree(nv_crtc);
342}
343
344int
345nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
346 uint32_t buffer_handle, uint32_t width, uint32_t height)
347{
348 struct drm_device *dev = crtc->dev;
349 struct drm_nouveau_private *dev_priv = dev->dev_private;
350 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
351 struct nouveau_bo *cursor = NULL;
352 struct drm_gem_object *gem;
353 int ret = 0, i;
354
355 if (width != 64 || height != 64)
356 return -EINVAL;
357
358 if (!buffer_handle) {
359 nv_crtc->cursor.hide(nv_crtc, true);
360 return 0;
361 }
362
363 gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);
364 if (!gem)
Chris Wilsonbf79cb92010-08-04 14:19:46 +0100365 return -ENOENT;
Ben Skeggs6ee73862009-12-11 19:24:15 +1000366 cursor = nouveau_gem_object(gem);
367
368 ret = nouveau_bo_map(cursor);
369 if (ret)
370 goto out;
371
372 /* The simple will do for now. */
373 for (i = 0; i < 64 * 64; i++)
374 nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, nouveau_bo_rd32(cursor, i));
375
376 nouveau_bo_unmap(cursor);
377
378 nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset -
379 dev_priv->vm_vram_base);
380 nv_crtc->cursor.show(nv_crtc, true);
381
382out:
Luca Barbieribc9025b2010-02-09 05:49:12 +0000383 drm_gem_object_unreference_unlocked(gem);
Ben Skeggs6ee73862009-12-11 19:24:15 +1000384 return ret;
385}
386
387int
388nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
389{
390 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
391
392 nv_crtc->cursor.set_pos(nv_crtc, x, y);
393 return 0;
394}
395
396static void
397nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
James Simmons72034252010-08-03 01:33:19 +0100398 uint32_t start, uint32_t size)
Ben Skeggs6ee73862009-12-11 19:24:15 +1000399{
James Simmons72034252010-08-03 01:33:19 +0100400 int end = (start + size > 256) ? 256 : start + size, i;
Ben Skeggs6ee73862009-12-11 19:24:15 +1000401 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
Ben Skeggs6ee73862009-12-11 19:24:15 +1000402
James Simmons72034252010-08-03 01:33:19 +0100403 for (i = start; i < end; i++) {
Ben Skeggs6ee73862009-12-11 19:24:15 +1000404 nv_crtc->lut.r[i] = r[i];
405 nv_crtc->lut.g[i] = g[i];
406 nv_crtc->lut.b[i] = b[i];
407 }
408
409 /* We need to know the depth before we upload, but it's possible to
410 * get called before a framebuffer is bound. If this is the case,
411 * mark the lut values as dirty by setting depth==0, and it'll be
412 * uploaded on the first mode_set_base()
413 */
414 if (!nv_crtc->base.fb) {
415 nv_crtc->lut.depth = 0;
416 return;
417 }
418
419 nv50_crtc_lut_load(crtc);
420}
421
/* drm_crtc_funcs.save hook — not implemented; the error shout makes it
 * obvious in the log if the core ever calls it.
 */
static void
nv50_crtc_save(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}
427
/* drm_crtc_funcs.restore hook — not implemented; the error shout makes
 * it obvious in the log if the core ever calls it.
 */
static void
nv50_crtc_restore(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}
433
/* CRTC control hooks exposed to the DRM core (save/restore are stubs). */
static const struct drm_crtc_funcs nv50_crtc_funcs = {
	.save = nv50_crtc_save,
	.restore = nv50_crtc_restore,
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nv50_crtc_destroy,
};
443
/* drm_crtc_helper_funcs.dpms hook.  Intentionally empty: nothing is done
 * at the CRTC level here — blanking happens in prepare()/commit().
 * NOTE(review): presumably encoder/connector dpms hooks handle power;
 * confirm.
 */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
448
449static void
450nv50_crtc_prepare(struct drm_crtc *crtc)
451{
452 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
453 struct drm_device *dev = crtc->dev;
Ben Skeggs6ee73862009-12-11 19:24:15 +1000454
Maarten Maathuisef2bb502009-12-13 16:53:12 +0100455 NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
Ben Skeggs6ee73862009-12-11 19:24:15 +1000456
Ben Skeggs6ee73862009-12-11 19:24:15 +1000457 nv50_crtc_blank(nv_crtc, true);
458}
459
/* drm_crtc_helper_funcs.commit: unblank the CRTC after a mode set and
 * fire an NV50_EVO_UPDATE so all the queued state takes effect.
 */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_crtc_blank(nv_crtc, false);

	ret = RING_SPACE(evo, 2);
	if (ret) {
		NV_ERROR(dev, "no space while committing crtc\n");
		return;
	}
	/* kick the hardware so the unblank (and prior methods) latch */
	BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
	OUT_RING  (evo, 0);
	FIRE_RING (evo);
}
482
/* drm_crtc_helper_funcs.mode_fixup: no adjustment needed — accept the
 * mode as-is.
 */
static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
489
/* Program the CRTC's framebuffer state: surface format, VRAM offset,
 * pitch/tiling, CLUT usage and panning position.
 *
 * Shared by mode_set() (update == false — the caller flushes later in
 * commit()) and mode_set_base() (update == true — flushed here with
 * NV50_EVO_UPDATE).
 *
 * Returns 0 on success or a negative error code (unknown depth, pin
 * failure, or no pushbuffer space).
 */
static int
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc, int x, int y,
			   struct drm_framebuffer *old_fb, bool update)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = dev_priv->evo;
	struct drm_framebuffer *drm_fb = nv_crtc->base.fb;
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);
	int ret, format;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	/* translate the fb depth into the EVO surface format */
	switch (drm_fb->depth) {
	case 8:
		format = NV50_EVO_CRTC_FB_DEPTH_8;
		break;
	case 15:
		format = NV50_EVO_CRTC_FB_DEPTH_15;
		break;
	case 16:
		format = NV50_EVO_CRTC_FB_DEPTH_16;
		break;
	case 24:
	case 32:
		format = NV50_EVO_CRTC_FB_DEPTH_24;
		break;
	case 30:
		format = NV50_EVO_CRTC_FB_DEPTH_30;
		break;
	default:
		 NV_ERROR(dev, "unknown depth %d\n", drm_fb->depth);
		 return -EINVAL;
	}

	/* pin the new fb in VRAM before unpinning the old one */
	ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		struct nouveau_framebuffer *ofb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(ofb->nvbo);
	}

	nv_crtc->fb.offset = fb->nvbo->bo.offset - dev_priv->vm_vram_base;
	nv_crtc->fb.tile_flags = fb->nvbo->tile_flags;
	nv_crtc->fb.cpp = drm_fb->bits_per_pixel / 8;
	/* when unblanked, NV84+ needs FB_DMA repointed to match the new
	 * fb's tile_flags (cf. the unblank path in nv50_crtc_blank())
	 */
	if (!nv_crtc->fb.blanked && dev_priv->chipset != 0x50) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_DMA), 1);
		if (nv_crtc->fb.tile_flags == 0x7a00)
			OUT_RING(evo, NvEvoFB32);
		else
		if (nv_crtc->fb.tile_flags == 0x7000)
			OUT_RING(evo, NvEvoFB16);
		else
			OUT_RING(evo, NvEvoVRAM);
	}

	ret = RING_SPACE(evo, 12);
	if (ret)
		return ret;

	/* surface location, size, pitch/tile-mode and format */
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_OFFSET), 5);
	OUT_RING(evo, nv_crtc->fb.offset >> 8);
	OUT_RING(evo, 0);
	OUT_RING(evo, (drm_fb->height << 16) | drm_fb->width);
	if (!nv_crtc->fb.tile_flags) {
		/* linear surface: byte pitch, plus bit 20 — NOTE(review):
		 * presumably the "linear" flag; confirm against hw docs
		 */
		OUT_RING(evo, drm_fb->pitch | (1 << 20));
	} else {
		OUT_RING(evo, ((drm_fb->pitch / 4) << 4) |
				  fb->nvbo->tile_mode);
	}
	if (dev_priv->chipset == 0x50)
		OUT_RING(evo, (fb->nvbo->tile_flags << 8) | format);
	else
		OUT_RING(evo, format);

	/* CLUT is bypassed for 8bpp, enabled for deeper formats */
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLUT_MODE), 1);
	OUT_RING(evo, fb->base.depth == 8 ?
		 NV50_EVO_CRTC_CLUT_MODE_OFF : NV50_EVO_CRTC_CLUT_MODE_ON);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, COLOR_CTRL), 1);
	OUT_RING(evo, NV50_EVO_CRTC_COLOR_CTRL_COLOR);
	/* panning offset within the framebuffer */
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_POS), 1);
	OUT_RING(evo, (y << 16) | x);

	/* re-upload the gamma ramp if the depth changed or was dirty */
	if (nv_crtc->lut.depth != fb->base.depth) {
		nv_crtc->lut.depth = fb->base.depth;
		nv50_crtc_lut_load(crtc);
	}

	if (update) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;
		BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING(evo, 0);
		FIRE_RING(evo);
	}

	return 0;
}
597
598static int
599nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *mode,
600 struct drm_display_mode *adjusted_mode, int x, int y,
601 struct drm_framebuffer *old_fb)
602{
603 struct drm_device *dev = crtc->dev;
604 struct drm_nouveau_private *dev_priv = dev->dev_private;
605 struct nouveau_channel *evo = dev_priv->evo;
606 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
607 struct nouveau_connector *nv_connector = NULL;
608 uint32_t hsync_dur, vsync_dur, hsync_start_to_end, vsync_start_to_end;
609 uint32_t hunk1, vunk1, vunk2a, vunk2b;
610 int ret;
611
612 /* Find the connector attached to this CRTC */
613 nv_connector = nouveau_crtc_connector_get(nv_crtc);
614
615 *nv_crtc->mode = *adjusted_mode;
616
Maarten Maathuisef2bb502009-12-13 16:53:12 +0100617 NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
Ben Skeggs6ee73862009-12-11 19:24:15 +1000618
619 hsync_dur = adjusted_mode->hsync_end - adjusted_mode->hsync_start;
620 vsync_dur = adjusted_mode->vsync_end - adjusted_mode->vsync_start;
621 hsync_start_to_end = adjusted_mode->htotal - adjusted_mode->hsync_start;
622 vsync_start_to_end = adjusted_mode->vtotal - adjusted_mode->vsync_start;
623 /* I can't give this a proper name, anyone else can? */
624 hunk1 = adjusted_mode->htotal -
625 adjusted_mode->hsync_start + adjusted_mode->hdisplay;
626 vunk1 = adjusted_mode->vtotal -
627 adjusted_mode->vsync_start + adjusted_mode->vdisplay;
628 /* Another strange value, this time only for interlaced adjusted_modes. */
629 vunk2a = 2 * adjusted_mode->vtotal -
630 adjusted_mode->vsync_start + adjusted_mode->vdisplay;
631 vunk2b = adjusted_mode->vtotal -
632 adjusted_mode->vsync_start + adjusted_mode->vtotal;
633
634 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
635 vsync_dur /= 2;
636 vsync_start_to_end /= 2;
637 vunk1 /= 2;
638 vunk2a /= 2;
639 vunk2b /= 2;
640 /* magic */
641 if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN) {
642 vsync_start_to_end -= 1;
643 vunk1 -= 1;
644 vunk2a -= 1;
645 vunk2b -= 1;
646 }
647 }
648
649 ret = RING_SPACE(evo, 17);
650 if (ret)
651 return ret;
652
653 BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLOCK), 2);
654 OUT_RING(evo, adjusted_mode->clock | 0x800000);
655 OUT_RING(evo, (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 0);
656
657 BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, DISPLAY_START), 5);
658 OUT_RING(evo, 0);
659 OUT_RING(evo, (adjusted_mode->vtotal << 16) | adjusted_mode->htotal);
660 OUT_RING(evo, (vsync_dur - 1) << 16 | (hsync_dur - 1));
661 OUT_RING(evo, (vsync_start_to_end - 1) << 16 |
662 (hsync_start_to_end - 1));
663 OUT_RING(evo, (vunk1 - 1) << 16 | (hunk1 - 1));
664
665 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
666 BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, UNK0824), 1);
667 OUT_RING(evo, (vunk2b - 1) << 16 | (vunk2a - 1));
668 } else {
669 OUT_RING(evo, 0);
670 OUT_RING(evo, 0);
671 }
672
673 BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, UNK082C), 1);
674 OUT_RING(evo, 0);
675
676 /* This is the actual resolution of the mode. */
677 BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, REAL_RES), 1);
678 OUT_RING(evo, (mode->vdisplay << 16) | mode->hdisplay);
679 BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CENTER_OFFSET), 1);
680 OUT_RING(evo, NV50_EVO_CRTC_SCALE_CENTER_OFFSET_VAL(0, 0));
681
682 nv_crtc->set_dither(nv_crtc, nv_connector->use_dithering, false);
683 nv_crtc->set_scale(nv_crtc, nv_connector->scaling_mode, false);
684
685 return nv50_crtc_do_mode_set_base(crtc, x, y, old_fb, false);
686}
687
/* drm_crtc_helper_funcs.mode_set_base: repoint the CRTC at a (possibly
 * new) framebuffer/offset and flush immediately (update == true).
 */
static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	return nv50_crtc_do_mode_set_base(crtc, x, y, old_fb, true);
}
694
/* Mode-setting helper hooks used by the DRM CRTC helper library. */
static const struct drm_crtc_helper_funcs nv50_crtc_helper_funcs = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.load_lut = nv50_crtc_lut_load,
};
704
705int
706nv50_crtc_create(struct drm_device *dev, int index)
707{
708 struct nouveau_crtc *nv_crtc = NULL;
709 int ret, i;
710
Maarten Maathuisef2bb502009-12-13 16:53:12 +0100711 NV_DEBUG_KMS(dev, "\n");
Ben Skeggs6ee73862009-12-11 19:24:15 +1000712
713 nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
714 if (!nv_crtc)
715 return -ENOMEM;
716
717 nv_crtc->mode = kzalloc(sizeof(*nv_crtc->mode), GFP_KERNEL);
718 if (!nv_crtc->mode) {
719 kfree(nv_crtc);
720 return -ENOMEM;
721 }
722
723 /* Default CLUT parameters, will be activated on the hw upon
724 * first mode set.
725 */
726 for (i = 0; i < 256; i++) {
727 nv_crtc->lut.r[i] = i << 8;
728 nv_crtc->lut.g[i] = i << 8;
729 nv_crtc->lut.b[i] = i << 8;
730 }
731 nv_crtc->lut.depth = 0;
732
733 ret = nouveau_bo_new(dev, NULL, 4096, 0x100, TTM_PL_FLAG_VRAM,
734 0, 0x0000, false, true, &nv_crtc->lut.nvbo);
735 if (!ret) {
736 ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
737 if (!ret)
738 ret = nouveau_bo_map(nv_crtc->lut.nvbo);
739 if (ret)
740 nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
741 }
742
743 if (ret) {
744 kfree(nv_crtc->mode);
745 kfree(nv_crtc);
746 return ret;
747 }
748
749 nv_crtc->index = index;
750
751 /* set function pointers */
752 nv_crtc->set_dither = nv50_crtc_set_dither;
753 nv_crtc->set_scale = nv50_crtc_set_scale;
754
755 drm_crtc_init(dev, &nv_crtc->base, &nv50_crtc_funcs);
756 drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
757 drm_mode_crtc_set_gamma_size(&nv_crtc->base, 256);
758
759 ret = nouveau_bo_new(dev, NULL, 64*64*4, 0x100, TTM_PL_FLAG_VRAM,
760 0, 0x0000, false, true, &nv_crtc->cursor.nvbo);
761 if (!ret) {
762 ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
763 if (!ret)
764 ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
765 if (ret)
766 nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
767 }
768
769 nv50_cursor_init(nv_crtc);
770 return 0;
771}