blob: 08a54b7b6efcbbab9e2533206ece8e97cf65aa66 [file] [log] [blame]
Ben Skeggs26f6d882011-07-04 16:25:18 +10001/*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
Ben Skeggs51beb422011-07-05 10:33:08 +100025#include <linux/dma-mapping.h>
Ben Skeggs83fc0832011-07-05 13:08:40 +100026
Ben Skeggs26f6d882011-07-04 16:25:18 +100027#include "drmP.h"
Ben Skeggs83fc0832011-07-05 13:08:40 +100028#include "drm_crtc_helper.h"
Ben Skeggs26f6d882011-07-04 16:25:18 +100029
30#include "nouveau_drv.h"
31#include "nouveau_connector.h"
32#include "nouveau_encoder.h"
33#include "nouveau_crtc.h"
Ben Skeggs438d99e2011-07-05 16:48:06 +100034#include "nouveau_fb.h"
Ben Skeggs26f6d882011-07-04 16:25:18 +100035
Ben Skeggsefd272a2011-07-05 11:58:58 +100036#define MEM_SYNC 0xe0000001
37#define MEM_VRAM 0xe0010000
Ben Skeggsc0cc92a2011-07-06 11:40:45 +100038#include "nouveau_dma.h"
Ben Skeggsefd272a2011-07-05 11:58:58 +100039
/* Per-device state for the NVD0 (Fermi) display engine. */
struct nvd0_display {
	struct nouveau_gpuobj *mem;	/* hash table / display objects in VRAM */
	struct {
		dma_addr_t handle;	/* DMA address of the push buffer page */
		u32 *ptr;		/* CPU mapping of the push buffer */
	} evo[1];			/* EVO channel state (master channel only) */
};
47
48static struct nvd0_display *
49nvd0_display(struct drm_device *dev)
50{
51 struct drm_nouveau_private *dev_priv = dev->dev_private;
52 return dev_priv->engine.display.priv;
53}
54
/* Submit a single method/data pair to EVO channel @id through the
 * immediate-command (debug) interface and wait for the hardware to
 * consume it.  Returns 0 on success, -EBUSY if the channel never
 * acknowledges the method.
 */
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	/* enable immediate-command mode for this channel */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	/* bit 31 triggers execution; hardware clears it when done */
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	/* back to normal (pushbuf) mode */
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}
67
/* Reserve space for @nr 32-bit words in EVO channel @id's push buffer.
 *
 * If the request would run past the end of the one-page buffer, a jump
 * back to offset 0 is written (0x20000000 opcode), PUT is reset, and we
 * wait for the channel's GET pointer to return to the start before
 * handing out space.  Returns a pointer to write methods into, or NULL
 * if the channel DMA appears stalled.
 */
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	/* current PUT offset, in words */
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		/* not enough room before end of buffer: jump to start */
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}
88
89static void
90evo_kick(u32 *push, struct drm_device *dev, int id)
91{
92 struct nvd0_display *disp = nvd0_display(dev);
93 nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
94}
95
/* Append a method header ((count << 18) | method) or a data word to the
 * push buffer; only valid between evo_wait() and evo_kick().
 */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)
98
Ben Skeggs83fc0832011-07-05 13:08:40 +100099static struct drm_crtc *
100nvd0_display_crtc_get(struct drm_encoder *encoder)
101{
102 return nouveau_encoder(encoder)->crtc;
103}
104
Ben Skeggs26f6d882011-07-04 16:25:18 +1000105/******************************************************************************
Ben Skeggs438d99e2011-07-05 16:48:06 +1000106 * CRTC
107 *****************************************************************************/
108static int
109nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
110{
111 struct drm_device *dev = nv_crtc->base.dev;
112 u32 *push, mode;
113
114 mode = 0x00000000;
115 if (on) {
116 /* 0x11: 6bpc dynamic 2x2
117 * 0x13: 8bpc dynamic 2x2
118 * 0x19: 6bpc static 2x2
119 * 0x1b: 8bpc static 2x2
120 * 0x21: 6bpc temporal
121 * 0x23: 8bpc temporal
122 */
123 mode = 0x00000011;
124 }
125
126 push = evo_wait(dev, 0, 4);
127 if (push) {
128 evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
129 evo_data(push, mode);
130 if (update) {
131 evo_mthd(push, 0x0080, 1);
132 evo_data(push, 0x00000000);
133 }
134 evo_kick(push, dev, 0);
135 }
136
137 return 0;
138}
139
/* Program the head's scaler.  Currently a stub: the viewport input and
 * output sizes are all set to the raw mode dimensions, i.e. no actual
 * scaling is performed and @type is ignored.
 */
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
{
	struct drm_display_mode *mode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push;

	/*XXX: actually handle scaling */

	push = evo_wait(dev, 0, 16);
	if (push) {
		/* viewport size in/out: all identical -> 1:1 */
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
		if (update) {
			/* commit pending state */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, dev, 0);
	}

	return 0;
}
170
/* Point the head at framebuffer @fb: offset, size, pitch, format and
 * ctxdma handle.  The ctxdma is cached in nv_crtc->fb.tile_flags so
 * nvd0_crtc_commit() can re-arm it when the head is re-enabled.
 *
 * NOTE(review): @x, @y and @update are accepted but not used here yet.
 */
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(fb->dev, 0, 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_kick(push, fb->dev, 0);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
193
/* Show or hide the hardware cursor on this head.  When showing, the
 * cursor image is taken from the pre-allocated cursor bo in VRAM.
 * When @update is set, an UPDATE method commits the change at once.
 */
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(dev, 0, 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);	/* enable, 64x64 a8r8g8b8 */
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, MEM_VRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);	/* disable */
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, dev, 0);
	}
}
221
/* Intentionally empty: this implementation has no per-head DPMS action. */
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
226
227static void
228nvd0_crtc_prepare(struct drm_crtc *crtc)
229{
230 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
231 u32 *push;
232
233 push = evo_wait(crtc->dev, 0, 2);
234 if (push) {
235 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
236 evo_data(push, 0x00000000);
237 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
238 evo_data(push, 0x03000000);
239 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
240 evo_data(push, 0x00000000);
241 evo_kick(push, crtc->dev, 0);
242 }
243
244 nvd0_crtc_cursor_show(nv_crtc, false, false);
245}
246
/* drm helper hook: re-enable the head after a mode switch.  Re-attaches
 * the framebuffer ctxdma cached by nvd0_crtc_set_image(), points the
 * core channel at the LUT, and restores cursor visibility (with an
 * UPDATE to commit everything).
 */
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(crtc->dev, 0, 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);	/* unblank, 256-entry LUT */
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, MEM_VRAM);
		evo_kick(push, crtc->dev, 0);
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
}
269
/* No per-CRTC mode adjustments are needed; accept every mode as-is. */
static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
276
277static int
278nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
279{
280 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
281 int ret;
282
283 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
284 if (ret)
285 return ret;
286
287 if (old_fb) {
288 nvfb = nouveau_framebuffer(old_fb);
289 nouveau_bo_unpin(nvfb->nvbo);
290 }
291
292 return 0;
293}
294
/* Program the CRTC's raster timings and pixel clock from @mode, then
 * apply the connector's dither/scaling settings and scan out crtc->fb.
 * The hardware wants distances measured from sync start, hence the
 * derived *ss2be / *ss2de values below.
 */
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 htotal = mode->htotal;
	u32 vtotal = mode->vtotal;
	u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;	/* sync width - 1 */
	u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
	u32 hfrntp = mode->hsync_start - mode->hdisplay;	/* front porch */
	u32 vfrntp = mode->vsync_start - mode->vdisplay;
	u32 hbackp = mode->htotal - mode->hsync_end;		/* back porch */
	u32 vbackp = mode->vtotal - mode->vsync_end;
	u32 hss2be = hsyncw + hbackp;	/* sync start -> blanking end */
	u32 vss2be = vsyncw + vbackp;
	u32 hss2de = htotal - hfrntp;	/* sync start -> display end */
	u32 vss2de = vtotal - vfrntp;
	u32 hstart = 0;
	u32 vstart = 0;
	u32 *push;
	int ret;

	/* pin the new fb / unpin the old before touching hardware */
	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(crtc->dev, 0, 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
		evo_data(push, (vstart << 16) | hstart);
		evo_data(push, (vtotal << 16) | htotal);
		evo_data(push, (vsyncw << 16) | hsyncw);
		evo_data(push, (vss2be << 16) | hss2be);
		evo_data(push, (vss2de << 16) | hss2de);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);	/* pixel clock, Hz */
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x31ec6000); /* ??? */
		evo_kick(push, crtc->dev, 0);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
	nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
348
349static int
350nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
351 struct drm_framebuffer *old_fb)
352{
353 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
354 int ret;
355
356 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
357 if (ret)
358 return ret;
359
360 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
361 return 0;
362}
363
364static int
365nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
366 struct drm_framebuffer *fb, int x, int y,
367 enum mode_set_atomic state)
368{
369 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
370 nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
371 return 0;
372}
373
374static void
375nvd0_crtc_lut_load(struct drm_crtc *crtc)
376{
377 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
378 void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
379 int i;
380
381 for (i = 0; i < 256; i++) {
382 writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
383 writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
384 writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
385 }
386}
387
388static int
389nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
390 uint32_t handle, uint32_t width, uint32_t height)
391{
392 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
393 struct drm_device *dev = crtc->dev;
394 struct drm_gem_object *gem;
395 struct nouveau_bo *nvbo;
396 bool visible = (handle != 0);
397 int i, ret = 0;
398
399 if (visible) {
400 if (width != 64 || height != 64)
401 return -EINVAL;
402
403 gem = drm_gem_object_lookup(dev, file_priv, handle);
404 if (unlikely(!gem))
405 return -ENOENT;
406 nvbo = nouveau_gem_object(gem);
407
408 ret = nouveau_bo_map(nvbo);
409 if (ret == 0) {
410 for (i = 0; i < 64 * 64; i++) {
411 u32 v = nouveau_bo_rd32(nvbo, i);
412 nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
413 }
414 nouveau_bo_unmap(nvbo);
415 }
416
417 drm_gem_object_unreference_unlocked(gem);
418 }
419
420 if (visible != nv_crtc->cursor.visible) {
421 nvd0_crtc_cursor_show(nv_crtc, visible, true);
422 nv_crtc->cursor.visible = visible;
423 }
424
425 return ret;
426}
427
428static int
429nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
430{
431 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
432 const u32 data = (y << 16) | x;
433
434 nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
435 nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
436 return 0;
437}
438
439static void
440nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
441 uint32_t start, uint32_t size)
442{
443 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
444 u32 end = max(start + size, (u32)256);
445 u32 i;
446
447 for (i = start; i < end; i++) {
448 nv_crtc->lut.r[i] = r[i];
449 nv_crtc->lut.g[i] = g[i];
450 nv_crtc->lut.b[i] = b[i];
451 }
452
453 nvd0_crtc_lut_load(crtc);
454}
455
/* Tear down a CRTC: release the cursor and LUT backing objects (unmap
 * before dropping the last reference), then free the crtc itself.
 * Also used as the error-path cleanup in nvd0_crtc_create().
 */
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
467
/* CRTC helper callbacks (modeset sequencing). */
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

/* CRTC core callbacks (cursor, gamma, config, teardown). */
static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
};
486
/* Create and register CRTC @index: initialise a linear default gamma
 * ramp, allocate and pin a 64x64x4-byte cursor buffer and a 4KiB LUT
 * buffer in VRAM (both kept CPU-mapped), and upload the initial LUT.
 * On any failure the partially-constructed CRTC is torn down via
 * nvd0_crtc_destroy().
 */
static int
nvd0_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc;
	struct drm_crtc *crtc;
	int ret, i;

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nvd0_crtc_set_dither;
	nv_crtc->set_scale = nvd0_crtc_set_scale;
	/* identity gamma ramp: 8-bit index -> 16-bit value */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	crtc = &nv_crtc->base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* cursor image buffer, pinned and mapped for cursor_set copies */
	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	/* hardware LUT buffer (256 entries * 8 bytes, rounded to a page) */
	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
545
546/******************************************************************************
Ben Skeggs26f6d882011-07-04 16:25:18 +1000547 * DAC
548 *****************************************************************************/
/* DAC DPMS: program the output's power-control register.  Bit 0 cuts
 * hsync, bit 2 cuts vsync; bit 31 arms the update.  The register is
 * waited on before and after to ensure the previous/current request
 * has been consumed by hardware.
 */
static void
nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x80000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
	nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
}
567
568static bool
569nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
570 struct drm_display_mode *adjusted_mode)
571{
572 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
573 struct nouveau_connector *nv_connector;
574
575 nv_connector = nouveau_encoder_connector_get(nv_encoder);
576 if (nv_connector && nv_connector->native_mode) {
577 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
578 int id = adjusted_mode->base.id;
579 *adjusted_mode = *nv_connector->native_mode;
580 adjusted_mode->base.id = id;
581 }
582 }
583
584 return true;
585}
586
/* Intentionally empty: nothing to do before/after a DAC modeset. */
static void
nvd0_dac_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}
596
/* Power the DAC up and attach it to the head via DAC_MODE_CTRL, then
 * record the bound crtc for later disconnect.
 */
static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
		evo_data(push, 1 << nv_crtc->index);	/* head bitmask */
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
616
/* Detach the DAC from its head: blank the head, clear DAC_MODE_CTRL,
 * and push an UPDATE.  No-op when not currently bound to a crtc.
 */
static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
	}
}
639
/* NOTE(review): stub — hardware load detection is not implemented yet,
 * so analog connections always report as disconnected.
 */
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	return connector_status_disconnected;
}
645
/* Unregister and free a DAC encoder allocated by nvd0_dac_create(). */
static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
652
/* DAC encoder helper callbacks. */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_prepare,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};

/* DAC encoder core callbacks. */
static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};
667
668static int
669nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
670{
671 struct drm_device *dev = connector->dev;
672 struct nouveau_encoder *nv_encoder;
673 struct drm_encoder *encoder;
674
675 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
676 if (!nv_encoder)
677 return -ENOMEM;
678 nv_encoder->dcb = dcbe;
679 nv_encoder->or = ffs(dcbe->or) - 1;
680
681 encoder = to_drm_encoder(nv_encoder);
682 encoder->possible_crtcs = dcbe->heads;
683 encoder->possible_clones = 0;
684 drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
685 drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
686
687 drm_mode_connector_attach_encoder(connector, encoder);
688 return 0;
689}
Ben Skeggs26f6d882011-07-04 16:25:18 +1000690
691/******************************************************************************
692 * SOR
693 *****************************************************************************/
/* SOR DPMS: record the requested state, then program the output's
 * power-control register — unless another TMDS encoder sharing the
 * same output resource is still on, in which case the hardware must
 * stay powered and we return early.
 */
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	/* don't touch the hardware if a partner on the same OR is active */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl = (mode == DRM_MODE_DPMS_ON);	/* bit 0: power on */
	dpms_ctrl |= 0x80000000;		/* bit 31: arm update */

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}
727
728static bool
729nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
730 struct drm_display_mode *adjusted_mode)
731{
732 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
733 struct nouveau_connector *nv_connector;
734
735 nv_connector = nouveau_encoder_connector_get(nv_encoder);
736 if (nv_connector && nv_connector->native_mode) {
737 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
738 int id = adjusted_mode->base.id;
739 *adjusted_mode = *nv_connector->native_mode;
740 adjusted_mode->base.id = id;
741 }
742 }
743
744 return true;
745}
746
/* Intentionally empty: nothing to do before/after a SOR modeset. */
static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
756
/* Power the SOR up and attach it to the head via SOR_MODE_CTRL,
 * selecting single- vs dual-link TMDS from the DCB link config and
 * the pixel clock, then record the bound crtc.
 */
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);	/* head bitmask */
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		/* link A: single link below 165MHz, dual link above */
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;	/* link B */
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}
786
/* Detach the SOR from its head: blank the head, clear SOR_MODE_CTRL,
 * push an UPDATE, and mark the encoder off.  No-op when not bound.
 */
static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}
810
/* Unregister and free a SOR encoder allocated by nvd0_sor_create(). */
static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
817
/* SOR encoder helper callbacks. */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

/* SOR encoder core callbacks. */
static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};
831
832static int
833nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
834{
835 struct drm_device *dev = connector->dev;
836 struct nouveau_encoder *nv_encoder;
837 struct drm_encoder *encoder;
838
839 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
840 if (!nv_encoder)
841 return -ENOMEM;
842 nv_encoder->dcb = dcbe;
843 nv_encoder->or = ffs(dcbe->or) - 1;
844 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
845
846 encoder = to_drm_encoder(nv_encoder);
847 encoder->possible_crtcs = dcbe->heads;
848 encoder->possible_clones = 0;
849 drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
850 drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
851
852 drm_mode_connector_attach_encoder(connector, encoder);
853 return 0;
854}
Ben Skeggs26f6d882011-07-04 16:25:18 +1000855
856/******************************************************************************
857 * IRQ
858 *****************************************************************************/
/* Log and acknowledge one of the "unknown" display state-change
 * interrupt stages.  The semantics of the three stages are not yet
 * understood; all three handlers do exactly the same thing, so share
 * the implementation (@unk is only used in the log message, keeping
 * the output identical to the previous per-stage handlers).
 */
static void
nvd0_display_unk_handler(struct drm_device *dev, int unk)
{
	NV_INFO(dev, "PDISP: %d 0x%08x 0x%08x 0x%08x\n", unk,
		nv_rd32(dev, 0x6101d0),
		nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));

	/* ack: clear the state registers and re-arm */
	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	nvd0_display_unk_handler(dev, 1);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	nvd0_display_unk_handler(dev, 2);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	nvd0_display_unk_handler(dev, 4);
}
891
/* Top-level display interrupt handler: dispatches EVO channel errors,
 * the three "unknown" state-change stages, and acks the per-head
 * status registers.  Unrecognised bits are logged.
 */
static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

	/* EVO channel exception: log method/data and reset the channel */
	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	/* display state-change stages */
	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	/* per-head status: just ack */
	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}
Ben Skeggs26f6d882011-07-04 16:25:18 +1000953
954/******************************************************************************
955 * Init
956 *****************************************************************************/
/* Shut the display engine down: stop the cursor channels (13 and 14),
 * then the master EVO channel, masking off their interrupts.  Mirrors
 * the bring-up sequence in nvd0_display_init().
 */
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}
982
/* Bring the display engine up: take it out of any pending reset,
 * seed the per-output control registers, point the hardware at our
 * hash table, start the master EVO channel and the two cursor
 * channels, and push the initial method stream (sync object, caps).
 * Returns 0 on success or -EBUSY if the hardware fails to respond.
 */
int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

	/* clear a pending engine reset/error condition, if any */
	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	/* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
	 * work at all unless you do the SOR part below.
	 */
	for (i = 0; i < 3; i++) {
		u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
		nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
	}

	for (i = 0; i < 4; i++) {
		u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
		nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
	}

	for (i = 0; i < 2; i++) {
		u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
		u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
		u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
		nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
		nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
		nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
	}

	/* point at our hash table / objects, enable interrupts */
	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

	/* initial method stream: bind sync object, flush caps */
	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}
1069
1070void
1071nvd0_display_destroy(struct drm_device *dev)
1072{
1073 struct drm_nouveau_private *dev_priv = dev->dev_private;
1074 struct nvd0_display *disp = nvd0_display(dev);
Ben Skeggs51beb422011-07-05 10:33:08 +10001075 struct pci_dev *pdev = dev->pdev;
Ben Skeggs26f6d882011-07-04 16:25:18 +10001076
1077 nvd0_display_fini(dev);
1078
Ben Skeggs51beb422011-07-05 10:33:08 +10001079 pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
Ben Skeggs26f6d882011-07-04 16:25:18 +10001080 nouveau_gpuobj_ref(NULL, &disp->mem);
Ben Skeggs46005222011-07-05 11:01:13 +10001081 nouveau_irq_unregister(dev, 26);
Ben Skeggs51beb422011-07-05 10:33:08 +10001082
1083 dev_priv->engine.display.priv = NULL;
Ben Skeggs26f6d882011-07-04 16:25:18 +10001084 kfree(disp);
1085}
1086
/* Build the nvd0 display state: CRTCs for both hardware heads,
 * encoders/connectors from the VBIOS DCB table, the EVO hash table and
 * DMA objects in instance memory, the EVO pushbuffer page, and finally
 * bring the hardware up via nvd0_display_init().  On any failure the
 * partially-built state is torn down through nvd0_display_destroy().
 *
 * Returns 0 on success or a negative errno.
 */
int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	/* publish early so nvd0_display() works in the cleanup path */
	dev_priv->engine.display.priv = disp;

	/* create crtc objects to represent the hw heads */
	for (i = 0; i < 2; i++) {
		ret = nvd0_crtc_create(dev, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		/* only on-chip TMDS and analog encoders are wired up so far */
		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		case OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

	/* Each dma object is six words at 0x20-aligned offsets from
	 * 0x1000, paired with a hash-table entry at the start of the
	 * object ((handle), (instance << 9) | valid).
	 * NOTE(review): word layouts below mirror the nv50 EVO dma
	 * object format — confirm against rnndb/envytools.
	 */

	/* MEM_SYNC: sync/notifier area inside disp->mem itself
	 * (0x2000..0x2fff) */
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	/* MEM_VRAM: all of VRAM */
	nv_wo32(disp->mem, 0x1020, 0x00000049);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	/* NvEvoVRAM_LP: VRAM again with different flags (word 0 = 9,
	 * not 0x49) */
	nv_wo32(disp->mem, 0x1040, 0x00000009);
	nv_wo32(disp->mem, 0x1044, 0x00000000);
	nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x104c, 0x00000000);
	nv_wo32(disp->mem, 0x1050, 0x00000000);
	nv_wo32(disp->mem, 0x1054, 0x00000000);
	nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
	nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);

	/* NvEvoFB32: VRAM with flags 0x0fe00009 */
	nv_wo32(disp->mem, 0x1060, 0x0fe00009);
	nv_wo32(disp->mem, 0x1064, 0x00000000);
	nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x106c, 0x00000000);
	nv_wo32(disp->mem, 0x1070, 0x00000000);
	nv_wo32(disp->mem, 0x1074, 0x00000000);
	nv_wo32(disp->mem, 0x0018, NvEvoFB32);
	nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);

	/* make the writes above visible to the display engine */
	pinstmem->flush(dev);

	/* push buffers for evo channels; .handle doubles as the DMA bus
	 * address programmed into 0x610494 by nvd0_display_init() */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}