/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nv50_display.h"

#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000
#include "nouveau_dma.h"

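/* Driver-private display state: "mem" holds the hash table and DMA objects
 * the EVO channels reference, "evo" the push buffer for each channel (only
 * the master channel so far), and "irq" carries state between the staged
 * modeset interrupt handlers (unk1/unk2/unk4) further down.
 */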
struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];
	struct {
		struct dcb_entry *dis;
		struct dcb_entry *ena;
		int crtc;
		int pclk;
		u16 script;
	} irq;
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

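/* Submit a single method to an EVO channel through what appears to be an
 * immediate-mode register pair (roles inferred from usage): write the data,
 * latch the method with the busy bit set, then poll for completion.
 */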
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

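/* Reserve space for "nr" dwords in an EVO push buffer and return a pointer
 * to write them at.  If the request would run past the end of the page, a
 * 0x20000000 word (presumably a jump back to the start) is written, the PUT
 * pointer is reset and we wait for the channel to drain before wrapping.
 */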
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)
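
/* Typical submission pattern used throughout this file: reserve space with
 * evo_wait(), write method/count headers and data words with the macros
 * above, then evo_kick() the new PUT offset to the hardware:
 *
 *	push = evo_wait(dev, 0, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, dev, 0);
 *	}
 */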

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * CRTC
 *****************************************************************************/
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push;

	/*XXX: actually handle scaling */

	push = evo_wait(dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}

static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}

static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 6);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}

static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}

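/* Program the head timings.  The parameters below follow the usual CRT
 * timing breakdown: hsyncw/vsyncw are the sync widths (minus one, as the
 * hardware appears to want), hfrntp/vfrntp the front porches, hbackp/vbackp
 * the back porches, and hss2be/hss2de the distances from sync start to
 * blanking end and to display end respectively.
 */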
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;
	u32 vss2de = vtotal - vfrntp;
	u32 hstart = 0;
	u32 vstart = 0;
	u32 *push;
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, (vstart << 16) | hstart);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x31ec6000); /* ??? */
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	return 0;
}

static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

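/* Upload the software gamma table into the hardware LUT buffer.  Each of
 * the 256 entries appears to occupy 0x20 bytes, with the R/G/B components
 * stored as 14-bit values (hence the >> 2 of the 16-bit LUT entries) plus
 * a 0x6000 offset whose exact meaning is unknown.
 */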
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}

static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	const u32 data = (y << 16) | x;

	nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
	nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
	return 0;
}

static void
nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min(start + size, (u32)256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nvd0_crtc_lut_load(crtc);
}

static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};

static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
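/* DAC power control.  The control bits are inferred from the DPMS modes
 * that set them: bit 0 appears to disable hsync (STANDBY/OFF), bit 2 vsync
 * (SUSPEND/OFF), while bit 31 is polled as a busy/pending flag before and
 * after the write.
 */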
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}

static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}

static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	return connector_status_disconnected;
}

static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};

static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

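/* Build the SOR_MODE_CTRL value: the low bits select the head(s) to drive,
 * and the 0x0n00 field appears to select the link/protocol -- link A
 * single-link TMDS (0x100), something dual-link-like for pixel clocks at or
 * above 165MHz (0x500), or link B (0x200).  The naming here is a guess
 * based on the sorconf.link test below.
 */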
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
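/* Modeset "supervisor" interrupts.  A mode change raises three stages in
 * sequence (bits 0, 1 and 2 of 0x6100ac): the first works out which outputs
 * are being disconnected and connected for the head and starts shutting the
 * old one down via its VBIOS script, the second finishes the shutdown,
 * programs the pixel clock and runs the script for the new output, and the
 * third runs that script once more to complete the sequence.  Much of this,
 * including the register names, is inferred rather than documented.
 */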
static struct dcb_entry *
lookup_dcb(struct drm_device *dev, int id, u32 mc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int type, or, i;

	if (id < 4) {
		type = OUTPUT_ANALOG;
		or = id;
	} else {
		type = OUTPUT_TMDS;
		or = id - 4;
	}

	for (i = 0; i < dev_priv->vbios.dcb.entries; i++) {
		struct dcb_entry *dcb = &dev_priv->vbios.dcb.entry[i];
		if (dcb->type == type && (dcb->or & (1 << or)))
			return dcb;
	}

	NV_INFO(dev, "PDISP: DCB for %d/0x%08x not found\n", id, mc);
	return NULL;
}

static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	u32 unkn, crtc = 0;
	int i;

	NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	unkn = nv_rd32(dev, 0x6101d4);
	if (!unkn) {
		unkn = nv_rd32(dev, 0x6109d4);
		crtc = 1;
	}

	disp->irq.ena = NULL;
	disp->irq.dis = NULL;
	disp->irq.crtc = crtc;
	disp->irq.pclk = nv_rd32(dev, 0x660450 + (disp->irq.crtc * 0x300));
	disp->irq.pclk /= 1000;

	for (i = 0; i < 8; i++) {
		u32 mcc = nv_rd32(dev, 0x640180 + (i * 0x20));
		u32 mcp = nv_rd32(dev, 0x660180 + (i * 0x20));

		if (mcc & (1 << crtc))
			disp->irq.dis = lookup_dcb(dev, i, mcc);

		if (mcp & (1 << crtc)) {
			disp->irq.ena = lookup_dcb(dev, i, mcp);
			switch (disp->irq.ena->type) {
			case OUTPUT_ANALOG:
				disp->irq.script = 0x00ff;
				break;
			case OUTPUT_TMDS:
				disp->irq.script = (mcp & 0x00000f00) >> 8;
				if (disp->irq.pclk >= 165000)
					disp->irq.script |= 0x0100;
				break;
			default:
				disp->irq.script = 0xbeef;
				break;
			}
		}
	}

	dcb = disp->irq.dis;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -1, dcb, crtc);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;
	int or;
	u32 tmp;

	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.dis;
	disp->irq.dis = NULL;
	if (dcb)
		nouveau_bios_run_display_table(dev, 0x0000, -2, dcb, crtc);

	nv50_crtc_set_clock(dev, crtc, pclk);

	dcb = disp->irq.ena;
	if (!dcb)
		goto ack;
	or = ffs(dcb->or) - 1;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

	nv_wr32(dev, 0x612200 + (crtc * 0x800), 0x00000000);
	switch (dcb->type) {
	case OUTPUT_ANALOG:
		nv_wr32(dev, 0x612280 + (or * 0x800), 0x00000000);
		break;
	case OUTPUT_TMDS:
		if (disp->irq.pclk >= 165000)
			tmp = 0x00000101;
		else
			tmp = 0x00000000;

		nv_mask(dev, 0x612300 + (or * 0x800), 0x00000707, tmp);
		break;
	default:
		break;
	}

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	struct dcb_entry *dcb;
	int crtc = disp->irq.crtc;
	int pclk = disp->irq.pclk;

	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	dcb = disp->irq.ena;
	disp->irq.ena = NULL;
	if (!dcb)
		goto ack;

	nouveau_bios_run_display_table(dev, disp->irq.script, pclk, dcb, crtc);

ack:
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

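/* Top-level PDISP interrupt dispatch.  Bit 1 reports EVO channel exceptions
 * (the offending method/data are logged and the channel kicked), bit 20 the
 * modeset supervisor stages handled above, and bits 24/25 appear to be
 * per-head status that is currently just acknowledged.
 */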
static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

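/* Bring the display engine up: mirror some DAC/SOR/head state into the
 * 0x6101xx area (the comment below notes SOR_MODE_CTRL fails without this),
 * point the hardware at our hash table, start the core (master) and cursor
 * channels, and submit an initial push buffer that appears to bind the
 * MEM_SYNC object and request an update.
 */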
int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

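	/* Lay out the hash table and DMA objects by hand.  Each DMA object
	 * is 0x20 bytes, starting at offset 0x1000: a "sync" object covering
	 * 0x2000-0x2fff of this gpuobj itself, followed by three differently
	 * flagged views of VRAM.  The hash entries at the start of the
	 * gpuobj pair each handle with (object offset << 9) | 1; the meaning
	 * of the low bits is inferred, not documented.
	 */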
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}