blob: 7b3efff172397955886aa91d0869af2cc53ec320 [file] [log] [blame]
Ben Skeggs26f6d882011-07-04 16:25:18 +10001/*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
Ben Skeggs51beb422011-07-05 10:33:08 +100025#include <linux/dma-mapping.h>
Ben Skeggs83fc0832011-07-05 13:08:40 +100026
Ben Skeggs26f6d882011-07-04 16:25:18 +100027#include "drmP.h"
Ben Skeggs83fc0832011-07-05 13:08:40 +100028#include "drm_crtc_helper.h"
Ben Skeggs26f6d882011-07-04 16:25:18 +100029
30#include "nouveau_drv.h"
31#include "nouveau_connector.h"
32#include "nouveau_encoder.h"
33#include "nouveau_crtc.h"
Ben Skeggs438d99e2011-07-05 16:48:06 +100034#include "nouveau_fb.h"
Ben Skeggs26f6d882011-07-04 16:25:18 +100035
Ben Skeggsefd272a2011-07-05 11:58:58 +100036#define MEM_SYNC 0xe0000001
37#define MEM_VRAM 0xe0010000
Ben Skeggsc0cc92a2011-07-06 11:40:45 +100038#include "nouveau_dma.h"
Ben Skeggsefd272a2011-07-05 11:58:58 +100039
Ben Skeggs26f6d882011-07-04 16:25:18 +100040struct nvd0_display {
41 struct nouveau_gpuobj *mem;
Ben Skeggs51beb422011-07-05 10:33:08 +100042 struct {
43 dma_addr_t handle;
44 u32 *ptr;
45 } evo[1];
Ben Skeggs26f6d882011-07-04 16:25:18 +100046};
47
48static struct nvd0_display *
49nvd0_display(struct drm_device *dev)
50{
51 struct drm_nouveau_private *dev_priv = dev->dev_private;
52 return dev_priv->engine.display.priv;
53}
54
/* Submit a single method/data pair to EVO channel 'id' through the
 * immediate-command registers, bypassing the push buffer.
 *
 * Returns 0 on success, -EBUSY if the hardware fails to consume the
 * method (bit 31 of the command register never clears).
 */
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	/* enable immediate-command mode for this channel */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	/* write method address with the trigger bit (31) set */
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	/* back to normal (push buffer) operation */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}
67
/* Reserve room for 'nr' dwords in EVO channel 'id's page-sized push
 * buffer.  If there is not enough space before the end of the page, a
 * wrap command is written and PUT is reset to the start of the buffer.
 *
 * Returns a pointer to the first free dword, or NULL if the channel's
 * DMA engine stalls while waiting for the wrap to complete.
 */
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4; /* PUT, dwords */

	if (put + nr >= (PAGE_SIZE / 4)) {
		/* 0x20000000 presumably encodes a jump back to offset 0
		 * of the push buffer — TODO confirm against EVO docs. */
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}
88
/* Advance channel 'id's PUT pointer to 'push' (byte offset within the
 * push buffer), kicking off execution of everything queued since the
 * matching evo_wait().
 */
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}
95
/* Queue a method header (dword count 's', method address 'm') and a
 * data dword into an evo_wait()-returned push-buffer cursor 'p'. */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)
98
Ben Skeggs83fc0832011-07-05 13:08:40 +100099static struct drm_crtc *
100nvd0_display_crtc_get(struct drm_encoder *encoder)
101{
102 return nouveau_encoder(encoder)->crtc;
103}
104
Ben Skeggs26f6d882011-07-04 16:25:18 +1000105/******************************************************************************
Ben Skeggs438d99e2011-07-05 16:48:06 +1000106 * CRTC
107 *****************************************************************************/
/* Program the CRTC's dithering mode: off when !on, otherwise 6bpc
 * dynamic 2x2 (see table below for the other known encodings).  When
 * 'update' is set, an UPDATE method (0x0080) is queued so the change
 * takes effect immediately.  Always returns 0.
 */
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push, mode;

	mode = 0x00000000;
	if (on) {
		/* 0x11: 6bpc dynamic 2x2
		 * 0x13: 8bpc dynamic 2x2
		 * 0x19: 6bpc static 2x2
		 * 0x1b: 8bpc static 2x2
		 * 0x21: 6bpc temporal
		 * 0x23: 8bpc temporal
		 */
		mode = 0x00000011;
	}

	push = evo_wait(dev, 0, 4);
	if (push) {
		evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
139
/* Program the CRTC scaler.  Scaling is not actually implemented yet:
 * regardless of 'type', the viewport in/out sizes are all set to the
 * current mode's resolution (i.e. 1:1).  When 'update' is set an
 * UPDATE method is queued as well.  Always returns 0.
 */
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push;

	/*XXX: actually handle scaling */

	push = evo_wait(dev, 0, 16);
	if (push) {
		/* three (height << 16 | width) viewport sizes, all 1:1 */
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
170
/* Point the CRTC at framebuffer 'fb': program scanout offset, size,
 * pitch, format and DMA object.  The 'x', 'y' and 'update' arguments
 * are currently unused.  Always returns 0.
 *
 * Side effect: caches the fb's DMA object in nv_crtc->fb.tile_flags,
 * which nvd0_crtc_commit() later re-programs.
 */
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
193
/* Show or hide the hardware cursor on this CRTC.  When showing, the
 * cursor image comes from nv_crtc->cursor.nvbo in VRAM; when hiding,
 * the cursor DMA object is cleared.  'update' queues an UPDATE method
 * so the change takes effect immediately.
 */
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}
221
/* drm_crtc_helper_funcs.dpms: intentionally empty — CRTC power state
 * is handled via prepare()/commit() instead. */
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
226
/* drm_crtc_helper_funcs.prepare: blank the CRTC before a mode switch.
 * Clears the fb/LUT DMA objects, writes 0x03000000 to the 0x0440
 * control method, and hides the cursor without flushing an UPDATE
 * (the eventual commit() re-enables everything).
 */
static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}
246
/* drm_crtc_helper_funcs.commit: re-enable the CRTC after a mode switch.
 * Restores the fb DMA object cached by nvd0_crtc_set_image(), points
 * the hardware at the LUT buffer, and restores the cursor (with an
 * UPDATE flush) to its previous visibility.
 */
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}
269
270static bool
271nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
272 struct drm_display_mode *adjusted_mode)
273{
274 return true;
275}
276
277static int
278nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
279{
280 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
281 int ret;
282
283 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
284 if (ret)
285 return ret;
286
287 if (old_fb) {
288 nvfb = nouveau_framebuffer(old_fb);
289 nouveau_bo_unpin(nvfb->nvbo);
290 }
291
292 return 0;
293}
294
/* drm_crtc_helper_funcs.mode_set: program CRTC timings, pixel clock and
 * scanout state for 'mode'.  Pins the new framebuffer (unpinning
 * old_fb), then derives the hardware timing parameters from the DRM
 * mode and queues them, followed by dither/scale/image setup (all
 * deferred — no UPDATE until commit()).
 *
 * Returns 0 on success or the framebuffer-pin error.
 */
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1; /* sync width - 1 */
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;      /* front porch */
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;          /* back porch */
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;  /* sync start to blank end */
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;  /* sync start to display end */
	u32 vss2de = vtotal - vfrntp;
	u32 hstart = 0;
	u32 vstart = 0;
	u32 *push;
	int ret;

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, (vstart << 16) | hstart);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000); /* pixel clock, Hz */
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x31ec6000); /* ??? */
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
348
349static int
350nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
351 struct drm_framebuffer *old_fb)
352{
353 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
354 int ret;
355
356 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
357 if (ret)
358 return ret;
359
360 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
361 return 0;
362}
363
364static int
365nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
366 struct drm_framebuffer *fb, int x, int y,
367 enum mode_set_atomic state)
368{
369 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
370 nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
371 return 0;
372}
373
374static void
375nvd0_crtc_lut_load(struct drm_crtc *crtc)
376{
377 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
378 void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
379 int i;
380
381 for (i = 0; i < 256; i++) {
382 writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
383 writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
384 writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
385 }
386}
387
/* drm_crtc_funcs.cursor_set: install a new cursor image from GEM object
 * 'handle' (or hide the cursor when handle == 0).  The user BO is
 * copied word-by-word into the CRTC's private 64x64 cursor buffer.
 *
 * Returns 0 on success, -EINVAL for sizes other than 64x64, -ENOENT
 * for a bad handle, or the nouveau_bo_map() error.
 */
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* only 64x64 cursors are supported */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			/* copy the image into our own cursor BO */
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	/* only touch the hardware when visibility actually changes */
	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
427
428static int
429nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
430{
431 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
432 const u32 data = (y << 16) | x;
433
434 nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
435 nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
436 return 0;
437}
438
439static void
440nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
441 uint32_t start, uint32_t size)
442{
443 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
444 u32 end = max(start + size, (u32)256);
445 u32 i;
446
447 for (i = start; i < end; i++) {
448 nv_crtc->lut.r[i] = r[i];
449 nv_crtc->lut.g[i] = g[i];
450 nv_crtc->lut.b[i] = b[i];
451 }
452
453 nvd0_crtc_lut_load(crtc);
454}
455
/* drm_crtc_funcs.destroy: release the cursor and LUT buffer objects,
 * unregister the CRTC from the DRM core, and free the wrapper struct.
 * nouveau_bo_unmap/_ref tolerate NULL, so this is safe from the
 * partial-init error path in nvd0_crtc_create().
 */
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
467
/* Helper (modeset) and core vtables for nvd0 CRTCs. */
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};
486
/* Allocate and register CRTC 'index': initialise a linear identity LUT,
 * hook up the DRM vtables, and allocate + pin + map the VRAM buffer
 * objects backing the 64x64 cursor and the 256-entry palette.  On any
 * failure everything already set up is torn down via nvd0_crtc_destroy().
 *
 * Returns 0 on success or a negative errno.
 */
static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	/* identity ramp: 8-bit index widened to the 16-bit LUT range */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* 64x64 ARGB cursor buffer, 256-byte aligned, in VRAM */
	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	/* palette buffer: 256 entries x 8 bytes = 2048, page allocated */
	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
545
546/******************************************************************************
Ben Skeggs26f6d882011-07-04 16:25:18 +1000547 * DAC
548 *****************************************************************************/
/* drm_encoder_helper_funcs.dpms for DAC outputs: program the DAC power
 * state.  STANDBY/OFF sets bit 0, SUSPEND/OFF sets bit 2 (so OFF sets
 * both); DPMS_ON leaves both clear.  Waits for the control register's
 * busy bit (31) to clear before and after the update.
 */
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}
567
/* drm_encoder_helper_funcs.mode_fixup for DACs: when the connector has
 * a native mode and scaling is enabled, substitute the native mode
 * (preserving the DRM object id) so the scaler handles the difference.
 * Same logic as nvd0_sor_mode_fixup().  Always accepts the mode.
 */
static bool
nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}
586
/* prepare/commit hooks for DACs: intentionally empty — all programming
 * happens in nvd0_dac_mode_set() and nvd0_dac_dpms(). */
static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}
596
/* drm_encoder_helper_funcs.mode_set for DACs: power the DAC on and
 * attach it to the encoder's CRTC via the DAC_MODE_CTRL method (one bit
 * per head).  Records the owning CRTC for later disconnect.
 */
static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
616
/* drm_encoder_helper_funcs.disable for DACs: if attached to a CRTC,
 * blank that CRTC, detach the DAC (MODE_CTRL = 0) with an immediate
 * UPDATE, and forget the CRTC binding.
 */
static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}
639
/* drm_encoder_funcs.destroy: unregister from the DRM core, then free
 * the encoder wrapper allocated in nvd0_dac_create(). */
static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
646
/* Helper and core vtables for DAC encoders. */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};
660
/* Create a DAC encoder for DCB entry 'dcbe' and attach it to
 * 'connector'.  The output-resource index is the bit position of
 * dcbe->or.  Returns 0 on success or -ENOMEM.
 */
static int
nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
Ben Skeggs26f6d882011-07-04 16:25:18 +1000683
684/******************************************************************************
685 * SOR
686 *****************************************************************************/
/* drm_encoder_helper_funcs.dpms for SOR (TMDS) outputs.  Because two
 * encoders can share one OR, bail out if a partner encoder on the same
 * OR is still DPMS_ON.  Otherwise program the SOR power bit (bit 0 =
 * on) and wait for the control register and 0x61c030 to settle.
 */
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	/* don't power the OR down while a sharing encoder is active */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}
720
/* drm_encoder_helper_funcs.mode_fixup for SORs: when the connector has
 * a native mode and scaling is enabled, substitute the native mode
 * (preserving the DRM object id) so the scaler handles the difference.
 * Same logic as nvd0_dac_mode_fixup().  Always accepts the mode.
 */
static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}
739
/* prepare/commit hooks for SORs: intentionally empty — all programming
 * happens in nvd0_sor_mode_set() and nvd0_sor_dpms(). */
static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
749
/* drm_encoder_helper_funcs.mode_set for SORs: build SOR_MODE_CTRL from
 * the target head and the DCB link configuration (link A: single- vs
 * dual-link TMDS by pixel clock; link B: 0x200), power the SOR on, and
 * queue the method.  Records the owning CRTC for later disconnect.
 */
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		/* single-link below 165MHz, dual-link above */
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
779
/* drm_encoder_helper_funcs.disable for SORs: if attached to a CRTC,
 * blank that CRTC, detach the SOR (MODE_CTRL = 0) with an immediate
 * UPDATE, and reset the cached CRTC/DPMS state.
 */
static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}
803
/* drm_encoder_funcs.destroy: unregister from the DRM core, then free
 * the encoder wrapper allocated in nvd0_sor_create(). */
static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
810
/* Helper and core vtables for SOR (TMDS) encoders. */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};
824
/* Create a TMDS SOR encoder for DCB entry 'dcbe' and attach it to
 * 'connector'.  The output-resource index is the bit position of
 * dcbe->or; DPMS state starts as OFF.  Returns 0 or -ENOMEM.
 */
static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
Ben Skeggs26f6d882011-07-04 16:25:18 +1000848
849/******************************************************************************
850 * IRQ
851 *****************************************************************************/
/* The three handlers below service bits 0, 1 and 2 of the PDISP
 * interrupt status register 0x6100ac.  Their precise meaning is not
 * yet known (hence the "unk" names); each currently just logs the
 * 0x6101d0/0x6101d4/0x6109d4 state registers, clears them, and acks
 * via bit 31 of 0x6101d0.
 */
static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}
884
/* Top-level PDISP interrupt dispatcher.  Handles, in order:
 *   bit 1  (0x00000002): EVO channel errors — log the offending
 *                        method/data and ack the channel;
 *   bit 20 (0x00100000): the "unk" state-change interrupts, fanned out
 *                        to the handlers above via 0x6100ac bits 0-2;
 *   bit 24 (0x01000000): status in 0x6100bc, acked by write-back;
 *   bit 25 (0x02000000): status in 0x6108bc, acked by write-back.
 * Anything left over is logged as unknown.
 */
static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;  /* lowest pending channel */
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}
Ben Skeggs26f6d882011-07-04 16:25:18 +1000946
947/******************************************************************************
948 * Init
949 *****************************************************************************/
/* Shut the display engine down: disable the two cursor channels (13 and
 * 14) and then the master EVO channel, masking their interrupts.  Each
 * channel is skipped if it is not currently enabled.
 */
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}
975
/* Bring the display engine up: take it out of VBIOS/power-on state,
 * seed the method-control shadow registers from the DAC/SOR/CRTC state,
 * point the engine at our hash table, start the master EVO channel and
 * both cursor channels, and push the initial sync/VRAM object setup.
 *
 * Returns 0 on success, -EBUSY if any channel fails to start.
 */
int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	/* clear power-on state if still pending */
	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000); /* reset PUT */
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	/* initial state: bind sync object, pulse the update method */
	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}
1062
1063void
1064nvd0_display_destroy(struct drm_device *dev)
1065{
1066 struct drm_nouveau_private *dev_priv = dev->dev_private;
1067 struct nvd0_display *disp = nvd0_display(dev);
Ben Skeggs51beb422011-07-05 10:33:08 +10001068 struct pci_dev *pdev = dev->pdev;
Ben Skeggs26f6d882011-07-04 16:25:18 +10001069
1070 nvd0_display_fini(dev);
1071
Ben Skeggs51beb422011-07-05 10:33:08 +10001072 pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
Ben Skeggs26f6d882011-07-04 16:25:18 +10001073 nouveau_gpuobj_ref(NULL, &disp->mem);
Ben Skeggs46005222011-07-05 11:01:13 +10001074 nouveau_irq_unregister(dev, 26);
Ben Skeggs51beb422011-07-05 10:33:08 +10001075
1076 dev_priv->engine.display.priv = NULL;
Ben Skeggs26f6d882011-07-04 16:25:18 +10001077 kfree(disp);
1078}
1079
/* Construct the nvd0 display engine state: CRTCs for both hardware
 * heads, encoders/connectors from the VBIOS DCB table, the interrupt
 * handler, the instmem hash table + DMA objects the EVO channels use,
 * and the core channel push buffer; finally brings the hardware up via
 * nvd0_display_init().
 *
 * Returns 0 on success or a negative errno; on failure all partially
 * constructed state is torn down through nvd0_display_destroy().
 */
int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	/* published before full construction so the error path's
	 * nvd0_display_destroy() can find it via nvd0_display(dev) */
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		/* only TMDS and analog outputs are wired up so far */
		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

	/* Each entry below is a dma object written at an offset inside
	 * disp->mem (6 words from 0x1000 on), paired with a hash-table
	 * entry at the start of disp->mem binding a handle to it.
	 * NOTE(review): the "(offset << 9) | 1" hash encoding and the
	 * object-class words (0x49, 0x09, ...) are taken from bring-up;
	 * not independently documented.
	 */

	/* MEM_SYNC: covers 0x2000..0x2fff of disp->mem (the sync area) */
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	/* MEM_VRAM: covers all of VRAM */
	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	/* NvEvoVRAM_LP: all of VRAM, different object class (0x09) */
	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	/* NvEvoFB32: all of VRAM with format bits in the first word */
	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	/* make the instmem writes visible to the hardware */
	pinstmem->flush(dev);

	/* push buffers for evo channels; .handle doubles as the dma
	 * address programmed into the channel by nvd0_display_init() */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	/* single unwind point: destroy tears down everything above */
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}