/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"

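/* Object handles for the two DMA objects built in nvd0_display_create():
 * MEM_SYNC covers a small sync area inside disp->mem, MEM_VRAM covers all
 * of VRAM.  The names and values are the driver's own choices.
 */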
#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000

struct nvd0_display {
	struct nouveau_gpuobj *mem;
	struct {
		dma_addr_t handle;
		u32 *ptr;
	} evo[1];
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	return dev_priv->engine.display.priv;
}

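/* Submit a single method/data pair to an EVO channel through what appears to
 * be its immediate-command interface, bypassing the push buffer.  Returns
 * -EBUSY if the hardware never acknowledges the method (register meanings
 * inferred from how they are used here, not from documentation).
 */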
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
	int ret = 0;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
	nv_wr32(dev, 0x610704 + (id * 0x10), data);
	nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
	if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
		ret = -EBUSY;
	nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
	return ret;
}

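/* Reserve room for 'nr' dwords in channel 'id's push buffer and return a
 * pointer to it.  If there is not enough space left in the page, write what
 * appears to be a wrap token (0x20000000), reset PUT and wait for the
 * hardware to drain before starting over at offset 0.  Returns NULL if the
 * channel stalls.
 */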
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		disp->evo[id].ptr[put] = 0x20000000;

		nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
		if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
			NV_ERROR(dev, "evo %d dma stalled\n", id);
			return NULL;
		}

		put = 0;
	}

	return disp->evo[id].ptr + put;
}

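/* Submit everything queued since the matching evo_wait() by advancing the
 * channel's PUT pointer to the caller's current push position.
 */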
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
	struct nvd0_display *disp = nvd0_display(dev);
	nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

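/* Queue a method header ((count << 18) | method) or a data word into the
 * push buffer, advancing the caller's pointer.
 */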
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * DAC
 *****************************************************************************/

/******************************************************************************
 * SOR
 *****************************************************************************/
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

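	/* If another TMDS encoder shares this OR and is still on (e.g. a
	 * cloned output), leave the OR powered and return early.
	 */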
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	dpms_ctrl  = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 *push;

	if (nv_encoder->dcb->sorconf.link & 1) {
		if (adjusted_mode->clock < 165000)
			mode_ctrl |= 0x00000100;
		else
			mode_ctrl |= 0x00000500;
	} else {
		mode_ctrl |= 0x00000200;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(encoder->dev, 0, 2);
	if (push) {
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
		evo_data(push, mode_ctrl);
		/* without a kick, the queued method is never submitted */
		evo_kick(push, encoder->dev, 0);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;

	if (nv_encoder->crtc) {
		u32 *push = evo_wait(dev, 0, 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, dev, 0);
		}

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
static void
nvd0_display_unk1_handler(struct drm_device *dev)
{
	u32 unk0 = nv_rd32(dev, 0x6101d0);

	NV_INFO(dev, "PDISP: unk1 0x%08x\n", unk0);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk2_handler(struct drm_device *dev)
{
	u32 unk0 = nv_rd32(dev, 0x6101d0);

	NV_INFO(dev, "PDISP: unk2 0x%08x\n", unk0);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_unk4_handler(struct drm_device *dev)
{
	u32 unk0 = nv_rd32(dev, 0x6101d0);

	NV_INFO(dev, "PDISP: unk4 0x%08x\n", unk0);

	nv_wr32(dev, 0x6101d4, 0x00000000);
	nv_wr32(dev, 0x6109d4, 0x00000000);
	nv_wr32(dev, 0x6101d0, 0x80000000);
}

static void
nvd0_display_intr(struct drm_device *dev)
{
	u32 intr = nv_rd32(dev, 0x610088);

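	/* EVO channel exception: log the channel id, method and data, then
	 * acknowledge and kick the channel again (register meanings inferred
	 * from how they are used here).
	 */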
	if (intr & 0x00000002) {
		u32 stat = nv_rd32(dev, 0x61009c);
		int chid = ffs(stat) - 1;
		if (chid >= 0) {
			u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
			u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
			u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

			NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
				     "0x%08x 0x%08x\n",
				chid, (mthd & 0x0000ffc), data, mthd, unkn);
			nv_wr32(dev, 0x61009c, (1 << chid));
			nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
		}

		intr &= ~0x00000002;
	}

	if (intr & 0x00100000) {
		u32 stat = nv_rd32(dev, 0x6100ac);

		if (stat & 0x00000007) {
			nv_wr32(dev, 0x6100ac, (stat & 0x00000007));

			if (stat & 0x00000001)
				nvd0_display_unk1_handler(dev);
			if (stat & 0x00000002)
				nvd0_display_unk2_handler(dev);
			if (stat & 0x00000004)
				nvd0_display_unk4_handler(dev);
			stat &= ~0x00000007;
		}

		if (stat) {
			NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
			nv_wr32(dev, 0x6100ac, stat);
		}

		intr &= ~0x00100000;
	}

	if (intr & 0x01000000) {
		u32 stat = nv_rd32(dev, 0x6100bc);
		nv_wr32(dev, 0x6100bc, stat);
		intr &= ~0x01000000;
	}

	if (intr & 0x02000000) {
		u32 stat = nv_rd32(dev, 0x6108bc);
		nv_wr32(dev, 0x6108bc, stat);
		intr &= ~0x02000000;
	}

	if (intr)
		NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
static void
nvd0_display_fini(struct drm_device *dev)
{
	int i;

	/* fini cursors */
	for (i = 14; i >= 13; i--) {
		if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
			continue;

		nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
		nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
		nv_mask(dev, 0x610090, 1 << i, 0x00000000);
		nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
	}

	/* fini master */
	if (nv_rd32(dev, 0x610490) & 0x00000010) {
		nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
		nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
		nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
		nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
		nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
	}
}

int
nvd0_display_init(struct drm_device *dev)
{
	struct nvd0_display *disp = nvd0_display(dev);
	u32 *push;
	int i;

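	/* The meaning of bit 8 in 0x6100ac and of the 0x6194e8 handshake is
	 * not documented; if the flag is set, acknowledge it and wait for
	 * 0x6194e8 bit 1 to clear before bringing up the channels.
	 */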
	if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
		nv_wr32(dev, 0x6100ac, 0x00000100);
		nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
		if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
			NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
				 nv_rd32(dev, 0x6194e8));
			return -EBUSY;
		}
	}

	nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
	nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);

	/* init master */
	nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
	nv_wr32(dev, 0x610498, 0x00010000);
	nv_wr32(dev, 0x61049c, 0x00000001);
	nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
	nv_wr32(dev, 0x640000, 0x00000000);
	nv_wr32(dev, 0x610490, 0x01000013);
	if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
		NV_ERROR(dev, "PDISP: master 0x%08x\n",
			 nv_rd32(dev, 0x610490));
		return -EBUSY;
	}
	nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
	nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

	/* init cursors */
	for (i = 13; i <= 14; i++) {
		nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
		if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
			NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
				 nv_rd32(dev, 0x610490 + (i * 0x10)));
			return -EBUSY;
		}

		nv_mask(dev, 0x610090, 1 << i, 1 << i);
		nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
	}

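	/* Bind the MEM_SYNC object to the core channel (method 0x0088 looks
	 * like a context-DMA bind) and submit an initial batch of methods;
	 * the purpose of 0x0084/0x008c here is not documented.
	 */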
	push = evo_wait(dev, 0, 32);
	if (!push)
		return -EBUSY;
	evo_mthd(push, 0x0088, 1);
	evo_data(push, MEM_SYNC);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, 0x80000000);
	evo_mthd(push, 0x008c, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, dev, 0);

	return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nvd0_display *disp = nvd0_display(dev);
	struct pci_dev *pdev = dev->pdev;

	nvd0_display_fini(dev);

	pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
	nouveau_gpuobj_ref(NULL, &disp->mem);
	nouveau_irq_unregister(dev, 26);

	dev_priv->engine.display.priv = NULL;
	kfree(disp);
}

int
nvd0_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
	struct dcb_table *dcb = &dev_priv->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct pci_dev *pdev = dev->pdev;
	struct nvd0_display *disp;
	struct dcb_entry *dcbe;
	int ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;
	dev_priv->engine.display.priv = disp;

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case OUTPUT_TMDS:
			nvd0_sor_create(connector, dcbe);
			break;
		default:
			NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(dev, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

	/* setup interrupt handling */
	nouveau_irq_register(dev, 26, nvd0_display_intr);

	/* hash table and dma objects for the memory areas we care about */
	ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
				 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
	if (ret)
		goto out;

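	/* Two DMA objects are written directly into disp->mem: one covering
	 * the sync area at +0x2000..+0x2fff inside disp->mem itself, and one
	 * covering all of VRAM.  The paired words at 0x0000/0x0008 appear to
	 * be hash-table entries binding the MEM_SYNC/MEM_VRAM handles to
	 * those objects (layout inferred from the values written below).
	 */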
	nv_wo32(disp->mem, 0x1000, 0x00000049);
	nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
	nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
	nv_wo32(disp->mem, 0x100c, 0x00000000);
	nv_wo32(disp->mem, 0x1010, 0x00000000);
	nv_wo32(disp->mem, 0x1014, 0x00000000);
	nv_wo32(disp->mem, 0x0000, MEM_SYNC);
	nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

	nv_wo32(disp->mem, 0x1020, 0x00000009);
	nv_wo32(disp->mem, 0x1024, 0x00000000);
	nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
	nv_wo32(disp->mem, 0x102c, 0x00000000);
	nv_wo32(disp->mem, 0x1030, 0x00000000);
	nv_wo32(disp->mem, 0x1034, 0x00000000);
	nv_wo32(disp->mem, 0x0008, MEM_VRAM);
	nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

	pinstmem->flush(dev);

	/* push buffers for evo channels */
	disp->evo[0].ptr =
		pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
	if (!disp->evo[0].ptr) {
		ret = -ENOMEM;
		goto out;
	}

	ret = nvd0_display_init(dev);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}