/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nv50_display.h"

#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000
#include "nouveau_dma.h"

struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];
	struct {
		struct dcb_entry *dis;
		struct dcb_entry *ena;
		int crtc;
		int pclk;
		u16 script;
	} irq;
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

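/* Reserve room for 'nr' dwords in EVO channel 'id's push buffer.  If the
 * request would run past the end of the page, what looks like a
 * jump-to-start token (0x20000000) is written, PUT is reset and we wait
 * for 0x640004 (presumably GET) to reach zero before wrapping.  Returns
 * NULL if the channel appears stalled.
 */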
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

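/* Submit the methods queued since evo_wait() by advancing the channel's
 * PUT pointer (0x640000) to the current push-buffer position. */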
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push;

	/*XXX: actually handle scaling */

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}

static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}

static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}

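/* Derive the raster timings the hardware expects (sync widths and the
 * sync-start-to-blanking-end / display-end distances) from the DRM mode,
 * program them, then reapply dither/scale/image state for the new mode. */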
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;
	u32 vss2de = vtotal - vfrntp;
	u32 hstart = 0;
	u32 vstart = 0;
	u32 *push;
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, (vstart << 16) | hstart);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x31ec6000); /* ??? */
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}
}

static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}

static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min_t(u32, start + size, 256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}

static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};

static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}

static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}

static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	return connector_status_disconnected;
}

static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};

static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
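/* Map an output-control slot index from the EVO/display state (0-3 appear
 * to be the DACs, 4-7 the TMDS SORs) back to its DCB entry, if any. */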
static struct dcb_entry *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int type, or, i;

	if (id < 4) {
		type = OUTPUT_ANALOG;
		or = id;
	} else {
		type = OUTPUT_TMDS;
		or = id - 4;
	}

	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			return dcb;
	}

	NV_INFO(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}

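/* The PDISP modeset interrupt appears to arrive in three stages (status
 * bits 1, 2 and 4).  Stage 1 works out which output is being disabled and
 * which enabled on the head and runs the first script on the outgoing
 * output; stage 2 runs the second script on the outgoing output, sets the
 * pixel clock and runs the enable script on the incoming one; stage 4
 * runs the enable script once more after the clock change.  Each stage is
 * acked by clearing 0x6101d4/0x6109d4 and writing 0x80000000 to 0x6101d0.
 */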
static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	u32 unkn, crtc = 0;
	int i;

	NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	unkn = nv_rd32(dev, 0x6101d4);
	if (!unkn) {
		unkn = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	disp->irq.ena = NULL;
	disp->irq.dis = NULL;
	disp->irq.crtc = crtc;
	disp->irq.pclk = nv_rd32(dev, 0x660450 + (disp->irq.crtc * 0x300));
	disp->irq.pclk /= 1000;

	for (i = 0; i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));

		if (mcc & (1 << crtc))
			disp->irq.dis = lookup_dcb(dev, i, mcc);

		if (mcp & (1 << crtc)) {
			disp->irq.ena = lookup_dcb(dev, i, mcp);
			switch (disp->irq.ena->type) {
			case OUTPUT_ANALOG:
				disp->irq.script = 0x00ff;
				break;
			case OUTPUT_TMDS:
				disp->irq.script = (mcp & 0x00000f00) >> 8;
				if (disp->irq.pclk >= 165000)
					disp->irq.script |= 0x0100;
				break;
			default:
				disp->irq.script = 0xbeef;
				break;
			}
		}
	}

	dcb = disp->irq.dis;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;
	int or;
	u32 tmp;

	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.dis;
	disp->irq.dis = NULL;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);

	nv50_crtc_set_clock(dev, crtc, pclk);

	dcb = disp->irq.ena;
	if (!dcb)
		goto ack;
	or = ffs(dcb->or) - 1;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

	nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
	switch (dcb->type) {
	case OUTPUT_ANALOG:
		nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
		break;
	case OUTPUT_TMDS:
		if (disp->irq.pclk >= 165000)
			tmp = 0x00000101;
		else
			tmp = 0x00000000;

		nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
		break;
	default:
		break;
	}

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;

	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.ena;
	disp->irq.ena = NULL;
	if (!dcb)
		goto ack;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

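	/* each (handle, context) pair written at 0x0000+ below looks like a
	 * hash-table entry whose context encodes the offset of a DMA object
	 * built at 0x1000+ in the same gpuobj: a small sync area inside the
	 * object itself, followed by linear views of VRAM for the EVO
	 * channel to use.
	 */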
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}