#include "drmP.h"
#include "drm.h"
#include "nouveau_drv.h"
#include "nouveau_drm.h"

/*
 * NV20
 * -----
 * There are 3 families:
 * NV20 is 0x10de:0x020*
 * NV25/28 is 0x10de:0x025* / 0x10de:0x028*
 * NV2A is 0x10de:0x02A0
 *
 * NV30
 * -----
 * There are 3 families:
 * NV30/31 is 0x10de:0x030* / 0x10de:0x031*
 * NV34 is 0x10de:0x032*
 * NV35/36 is 0x10de:0x033* / 0x10de:0x034*
 *
 * Not seen in the wild, no dumps (probably NV35):
 * NV37 is 0x10de:0x00fc, 0x10de:0x00fd
 * NV38 is 0x10de:0x0333, 0x10de:0x00fe
 *
 */

#define NV20_GRCTX_SIZE (3580*4)
#define NV25_GRCTX_SIZE (3529*4)
#define NV2A_GRCTX_SIZE (3500*4)

#define NV30_31_GRCTX_SIZE (24392)
#define NV34_GRCTX_SIZE (18140)
#define NV35_36_GRCTX_SIZE (22396)

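/*
 * The *_graph_context_init() functions below fill a freshly allocated
 * per-channel context object with default PGRAPH state for each chipset
 * family.  The offsets and magic values are assumed to mirror the
 * hardware's own context image layout; they are not publicly documented.
 */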
static void
nv20_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
{
	int i;

	nv_wo32(ctx, 0x033c, 0xffff0000);
	nv_wo32(ctx, 0x03a0, 0x0fff0000);
	nv_wo32(ctx, 0x03a4, 0x0fff0000);
	nv_wo32(ctx, 0x047c, 0x00000101);
	nv_wo32(ctx, 0x0490, 0x00000111);
	nv_wo32(ctx, 0x04a8, 0x44400000);
	for (i = 0x04d4; i <= 0x04e0; i += 4)
		nv_wo32(ctx, i, 0x00030303);
	for (i = 0x04f4; i <= 0x0500; i += 4)
		nv_wo32(ctx, i, 0x00080000);
	for (i = 0x050c; i <= 0x0518; i += 4)
		nv_wo32(ctx, i, 0x01012000);
	for (i = 0x051c; i <= 0x0528; i += 4)
		nv_wo32(ctx, i, 0x000105b8);
	for (i = 0x052c; i <= 0x0538; i += 4)
		nv_wo32(ctx, i, 0x00080008);
	for (i = 0x055c; i <= 0x0598; i += 4)
		nv_wo32(ctx, i, 0x07ff0000);
	nv_wo32(ctx, 0x05a4, 0x4b7fffff);
	nv_wo32(ctx, 0x05fc, 0x00000001);
	nv_wo32(ctx, 0x0604, 0x00004000);
	nv_wo32(ctx, 0x0610, 0x00000001);
	nv_wo32(ctx, 0x0618, 0x00040000);
	nv_wo32(ctx, 0x061c, 0x00010000);
	for (i = 0x1c1c; i <= 0x248c; i += 16) {
		nv_wo32(ctx, (i + 0), 0x10700ff9);
		nv_wo32(ctx, (i + 4), 0x0436086c);
		nv_wo32(ctx, (i + 8), 0x000c001b);
	}
	nv_wo32(ctx, 0x281c, 0x3f800000);
	nv_wo32(ctx, 0x2830, 0x3f800000);
	nv_wo32(ctx, 0x285c, 0x40000000);
	nv_wo32(ctx, 0x2860, 0x3f800000);
	nv_wo32(ctx, 0x2864, 0x3f000000);
	nv_wo32(ctx, 0x286c, 0x40000000);
	nv_wo32(ctx, 0x2870, 0x3f800000);
	nv_wo32(ctx, 0x2878, 0xbf800000);
	nv_wo32(ctx, 0x2880, 0xbf800000);
	nv_wo32(ctx, 0x34a4, 0x000fe000);
	nv_wo32(ctx, 0x3530, 0x000003f8);
	nv_wo32(ctx, 0x3540, 0x002fe000);
	for (i = 0x355c; i <= 0x3578; i += 4)
		nv_wo32(ctx, i, 0x001c527c);
}

static void
nv25_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
{
	int i;

	nv_wo32(ctx, 0x035c, 0xffff0000);
	nv_wo32(ctx, 0x03c0, 0x0fff0000);
	nv_wo32(ctx, 0x03c4, 0x0fff0000);
	nv_wo32(ctx, 0x049c, 0x00000101);
	nv_wo32(ctx, 0x04b0, 0x00000111);
	nv_wo32(ctx, 0x04c8, 0x00000080);
	nv_wo32(ctx, 0x04cc, 0xffff0000);
	nv_wo32(ctx, 0x04d0, 0x00000001);
	nv_wo32(ctx, 0x04e4, 0x44400000);
	nv_wo32(ctx, 0x04fc, 0x4b800000);
	for (i = 0x0510; i <= 0x051c; i += 4)
		nv_wo32(ctx, i, 0x00030303);
	for (i = 0x0530; i <= 0x053c; i += 4)
		nv_wo32(ctx, i, 0x00080000);
	for (i = 0x0548; i <= 0x0554; i += 4)
		nv_wo32(ctx, i, 0x01012000);
	for (i = 0x0558; i <= 0x0564; i += 4)
		nv_wo32(ctx, i, 0x000105b8);
	for (i = 0x0568; i <= 0x0574; i += 4)
		nv_wo32(ctx, i, 0x00080008);
	for (i = 0x0598; i <= 0x05d4; i += 4)
		nv_wo32(ctx, i, 0x07ff0000);
	nv_wo32(ctx, 0x05e0, 0x4b7fffff);
	nv_wo32(ctx, 0x0620, 0x00000080);
	nv_wo32(ctx, 0x0624, 0x30201000);
	nv_wo32(ctx, 0x0628, 0x70605040);
	nv_wo32(ctx, 0x062c, 0xb0a09080);
	nv_wo32(ctx, 0x0630, 0xf0e0d0c0);
	nv_wo32(ctx, 0x0664, 0x00000001);
	nv_wo32(ctx, 0x066c, 0x00004000);
	nv_wo32(ctx, 0x0678, 0x00000001);
	nv_wo32(ctx, 0x0680, 0x00040000);
	nv_wo32(ctx, 0x0684, 0x00010000);
	for (i = 0x1b04; i <= 0x2374; i += 16) {
		nv_wo32(ctx, (i + 0), 0x10700ff9);
		nv_wo32(ctx, (i + 4), 0x0436086c);
		nv_wo32(ctx, (i + 8), 0x000c001b);
	}
	nv_wo32(ctx, 0x2704, 0x3f800000);
	nv_wo32(ctx, 0x2718, 0x3f800000);
	nv_wo32(ctx, 0x2744, 0x40000000);
	nv_wo32(ctx, 0x2748, 0x3f800000);
	nv_wo32(ctx, 0x274c, 0x3f000000);
	nv_wo32(ctx, 0x2754, 0x40000000);
	nv_wo32(ctx, 0x2758, 0x3f800000);
	nv_wo32(ctx, 0x2760, 0xbf800000);
	nv_wo32(ctx, 0x2768, 0xbf800000);
	nv_wo32(ctx, 0x308c, 0x000fe000);
	nv_wo32(ctx, 0x3108, 0x000003f8);
	nv_wo32(ctx, 0x3468, 0x002fe000);
	for (i = 0x3484; i <= 0x34a0; i += 4)
		nv_wo32(ctx, i, 0x001c527c);
}

static void
nv2a_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
{
	int i;

	nv_wo32(ctx, 0x033c, 0xffff0000);
	nv_wo32(ctx, 0x03a0, 0x0fff0000);
	nv_wo32(ctx, 0x03a4, 0x0fff0000);
	nv_wo32(ctx, 0x047c, 0x00000101);
	nv_wo32(ctx, 0x0490, 0x00000111);
	nv_wo32(ctx, 0x04a8, 0x44400000);
	for (i = 0x04d4; i <= 0x04e0; i += 4)
		nv_wo32(ctx, i, 0x00030303);
	for (i = 0x04f4; i <= 0x0500; i += 4)
		nv_wo32(ctx, i, 0x00080000);
	for (i = 0x050c; i <= 0x0518; i += 4)
		nv_wo32(ctx, i, 0x01012000);
	for (i = 0x051c; i <= 0x0528; i += 4)
		nv_wo32(ctx, i, 0x000105b8);
	for (i = 0x052c; i <= 0x0538; i += 4)
		nv_wo32(ctx, i, 0x00080008);
	for (i = 0x055c; i <= 0x0598; i += 4)
		nv_wo32(ctx, i, 0x07ff0000);
	nv_wo32(ctx, 0x05a4, 0x4b7fffff);
	nv_wo32(ctx, 0x05fc, 0x00000001);
	nv_wo32(ctx, 0x0604, 0x00004000);
	nv_wo32(ctx, 0x0610, 0x00000001);
	nv_wo32(ctx, 0x0618, 0x00040000);
	nv_wo32(ctx, 0x061c, 0x00010000);
	for (i = 0x1a9c; i <= 0x22fc; i += 16) { /*XXX: check!! */
		nv_wo32(ctx, (i + 0), 0x10700ff9);
		nv_wo32(ctx, (i + 4), 0x0436086c);
		nv_wo32(ctx, (i + 8), 0x000c001b);
	}
	nv_wo32(ctx, 0x269c, 0x3f800000);
	nv_wo32(ctx, 0x26b0, 0x3f800000);
	nv_wo32(ctx, 0x26dc, 0x40000000);
	nv_wo32(ctx, 0x26e0, 0x3f800000);
	nv_wo32(ctx, 0x26e4, 0x3f000000);
	nv_wo32(ctx, 0x26ec, 0x40000000);
	nv_wo32(ctx, 0x26f0, 0x3f800000);
	nv_wo32(ctx, 0x26f8, 0xbf800000);
	nv_wo32(ctx, 0x2700, 0xbf800000);
	nv_wo32(ctx, 0x3024, 0x000fe000);
	nv_wo32(ctx, 0x30a0, 0x000003f8);
	nv_wo32(ctx, 0x33fc, 0x002fe000);
	for (i = 0x341c; i <= 0x3438; i += 4)
		nv_wo32(ctx, i, 0x001c527c);
}

static void
nv30_31_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
{
	int i;

	nv_wo32(ctx, 0x0410, 0x00000101);
	nv_wo32(ctx, 0x0424, 0x00000111);
	nv_wo32(ctx, 0x0428, 0x00000060);
	nv_wo32(ctx, 0x0444, 0x00000080);
	nv_wo32(ctx, 0x0448, 0xffff0000);
	nv_wo32(ctx, 0x044c, 0x00000001);
	nv_wo32(ctx, 0x0460, 0x44400000);
	nv_wo32(ctx, 0x048c, 0xffff0000);
	for (i = 0x04e0; i < 0x04e8; i += 4)
		nv_wo32(ctx, i, 0x0fff0000);
	nv_wo32(ctx, 0x04ec, 0x00011100);
	for (i = 0x0508; i < 0x0548; i += 4)
		nv_wo32(ctx, i, 0x07ff0000);
	nv_wo32(ctx, 0x0550, 0x4b7fffff);
	nv_wo32(ctx, 0x058c, 0x00000080);
	nv_wo32(ctx, 0x0590, 0x30201000);
	nv_wo32(ctx, 0x0594, 0x70605040);
	nv_wo32(ctx, 0x0598, 0xb8a89888);
	nv_wo32(ctx, 0x059c, 0xf8e8d8c8);
	nv_wo32(ctx, 0x05b0, 0xb0000000);
	for (i = 0x0600; i < 0x0640; i += 4)
		nv_wo32(ctx, i, 0x00010588);
	for (i = 0x0640; i < 0x0680; i += 4)
		nv_wo32(ctx, i, 0x00030303);
	for (i = 0x06c0; i < 0x0700; i += 4)
		nv_wo32(ctx, i, 0x0008aae4);
	for (i = 0x0700; i < 0x0740; i += 4)
		nv_wo32(ctx, i, 0x01012000);
	for (i = 0x0740; i < 0x0780; i += 4)
		nv_wo32(ctx, i, 0x00080008);
	nv_wo32(ctx, 0x085c, 0x00040000);
	nv_wo32(ctx, 0x0860, 0x00010000);
	for (i = 0x0864; i < 0x0874; i += 4)
		nv_wo32(ctx, i, 0x00040004);
	for (i = 0x1f18; i <= 0x3088 ; i += 16) {
		nv_wo32(ctx, i + 0, 0x10700ff9);
		nv_wo32(ctx, i + 4, 0x0436086c);
		nv_wo32(ctx, i + 8, 0x000c001b);
	}
	for (i = 0x30b8; i < 0x30c8; i += 4)
		nv_wo32(ctx, i, 0x0000ffff);
	nv_wo32(ctx, 0x344c, 0x3f800000);
	nv_wo32(ctx, 0x3808, 0x3f800000);
	nv_wo32(ctx, 0x381c, 0x3f800000);
	nv_wo32(ctx, 0x3848, 0x40000000);
	nv_wo32(ctx, 0x384c, 0x3f800000);
	nv_wo32(ctx, 0x3850, 0x3f000000);
	nv_wo32(ctx, 0x3858, 0x40000000);
	nv_wo32(ctx, 0x385c, 0x3f800000);
	nv_wo32(ctx, 0x3864, 0xbf800000);
	nv_wo32(ctx, 0x386c, 0xbf800000);
}

static void
nv34_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
{
	int i;

	nv_wo32(ctx, 0x040c, 0x01000101);
	nv_wo32(ctx, 0x0420, 0x00000111);
	nv_wo32(ctx, 0x0424, 0x00000060);
	nv_wo32(ctx, 0x0440, 0x00000080);
	nv_wo32(ctx, 0x0444, 0xffff0000);
	nv_wo32(ctx, 0x0448, 0x00000001);
	nv_wo32(ctx, 0x045c, 0x44400000);
	nv_wo32(ctx, 0x0480, 0xffff0000);
	for (i = 0x04d4; i < 0x04dc; i += 4)
		nv_wo32(ctx, i, 0x0fff0000);
	nv_wo32(ctx, 0x04e0, 0x00011100);
	for (i = 0x04fc; i < 0x053c; i += 4)
		nv_wo32(ctx, i, 0x07ff0000);
	nv_wo32(ctx, 0x0544, 0x4b7fffff);
	nv_wo32(ctx, 0x057c, 0x00000080);
	nv_wo32(ctx, 0x0580, 0x30201000);
	nv_wo32(ctx, 0x0584, 0x70605040);
	nv_wo32(ctx, 0x0588, 0xb8a89888);
	nv_wo32(ctx, 0x058c, 0xf8e8d8c8);
	nv_wo32(ctx, 0x05a0, 0xb0000000);
	for (i = 0x05f0; i < 0x0630; i += 4)
		nv_wo32(ctx, i, 0x00010588);
	for (i = 0x0630; i < 0x0670; i += 4)
		nv_wo32(ctx, i, 0x00030303);
	for (i = 0x06b0; i < 0x06f0; i += 4)
		nv_wo32(ctx, i, 0x0008aae4);
	for (i = 0x06f0; i < 0x0730; i += 4)
		nv_wo32(ctx, i, 0x01012000);
	for (i = 0x0730; i < 0x0770; i += 4)
		nv_wo32(ctx, i, 0x00080008);
	nv_wo32(ctx, 0x0850, 0x00040000);
	nv_wo32(ctx, 0x0854, 0x00010000);
	for (i = 0x0858; i < 0x0868; i += 4)
		nv_wo32(ctx, i, 0x00040004);
	for (i = 0x15ac; i <= 0x271c ; i += 16) {
		nv_wo32(ctx, i + 0, 0x10700ff9);
		nv_wo32(ctx, i + 4, 0x0436086c);
		nv_wo32(ctx, i + 8, 0x000c001b);
	}
	for (i = 0x274c; i < 0x275c; i += 4)
		nv_wo32(ctx, i, 0x0000ffff);
	nv_wo32(ctx, 0x2ae0, 0x3f800000);
	nv_wo32(ctx, 0x2e9c, 0x3f800000);
	nv_wo32(ctx, 0x2eb0, 0x3f800000);
	nv_wo32(ctx, 0x2edc, 0x40000000);
	nv_wo32(ctx, 0x2ee0, 0x3f800000);
	nv_wo32(ctx, 0x2ee4, 0x3f000000);
	nv_wo32(ctx, 0x2eec, 0x40000000);
	nv_wo32(ctx, 0x2ef0, 0x3f800000);
	nv_wo32(ctx, 0x2ef8, 0xbf800000);
	nv_wo32(ctx, 0x2f00, 0xbf800000);
}

static void
nv35_36_graph_context_init(struct drm_device *dev, struct nouveau_gpuobj *ctx)
{
	int i;

	nv_wo32(ctx, 0x040c, 0x00000101);
	nv_wo32(ctx, 0x0420, 0x00000111);
	nv_wo32(ctx, 0x0424, 0x00000060);
	nv_wo32(ctx, 0x0440, 0x00000080);
	nv_wo32(ctx, 0x0444, 0xffff0000);
	nv_wo32(ctx, 0x0448, 0x00000001);
	nv_wo32(ctx, 0x045c, 0x44400000);
	nv_wo32(ctx, 0x0488, 0xffff0000);
	for (i = 0x04dc; i < 0x04e4; i += 4)
		nv_wo32(ctx, i, 0x0fff0000);
	nv_wo32(ctx, 0x04e8, 0x00011100);
	for (i = 0x0504; i < 0x0544; i += 4)
		nv_wo32(ctx, i, 0x07ff0000);
	nv_wo32(ctx, 0x054c, 0x4b7fffff);
	nv_wo32(ctx, 0x0588, 0x00000080);
	nv_wo32(ctx, 0x058c, 0x30201000);
	nv_wo32(ctx, 0x0590, 0x70605040);
	nv_wo32(ctx, 0x0594, 0xb8a89888);
	nv_wo32(ctx, 0x0598, 0xf8e8d8c8);
	nv_wo32(ctx, 0x05ac, 0xb0000000);
	for (i = 0x0604; i < 0x0644; i += 4)
		nv_wo32(ctx, i, 0x00010588);
	for (i = 0x0644; i < 0x0684; i += 4)
		nv_wo32(ctx, i, 0x00030303);
	for (i = 0x06c4; i < 0x0704; i += 4)
		nv_wo32(ctx, i, 0x0008aae4);
	for (i = 0x0704; i < 0x0744; i += 4)
		nv_wo32(ctx, i, 0x01012000);
	for (i = 0x0744; i < 0x0784; i += 4)
		nv_wo32(ctx, i, 0x00080008);
	nv_wo32(ctx, 0x0860, 0x00040000);
	nv_wo32(ctx, 0x0864, 0x00010000);
	for (i = 0x0868; i < 0x0878; i += 4)
		nv_wo32(ctx, i, 0x00040004);
	for (i = 0x1f1c; i <= 0x308c ; i += 16) {
		nv_wo32(ctx, i + 0, 0x10700ff9);
		nv_wo32(ctx, i + 4, 0x0436086c);
		nv_wo32(ctx, i + 8, 0x000c001b);
	}
	for (i = 0x30bc; i < 0x30cc; i += 4)
		nv_wo32(ctx, i, 0x0000ffff);
	nv_wo32(ctx, 0x3450, 0x3f800000);
	nv_wo32(ctx, 0x380c, 0x3f800000);
	nv_wo32(ctx, 0x3820, 0x3f800000);
	nv_wo32(ctx, 0x384c, 0x40000000);
	nv_wo32(ctx, 0x3850, 0x3f800000);
	nv_wo32(ctx, 0x3854, 0x3f000000);
	nv_wo32(ctx, 0x385c, 0x40000000);
	nv_wo32(ctx, 0x3860, 0x3f800000);
	nv_wo32(ctx, 0x3868, 0xbf800000);
	nv_wo32(ctx, 0x3870, 0xbf800000);
}

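/*
 * Create a channel's PGRAPH context: allocate the context object, fill
 * it with the chipset's default values, tag it with the channel id
 * (CTX_USER) and point the channel's slot in the global context table
 * at it.
 */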
int
nv20_graph_create_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
	void (*ctx_init)(struct drm_device *, struct nouveau_gpuobj *);
	unsigned int idoffs = 0x28;
	int ret;

	switch (dev_priv->chipset) {
	case 0x20:
		ctx_init = nv20_graph_context_init;
		idoffs = 0;
		break;
	case 0x25:
	case 0x28:
		ctx_init = nv25_graph_context_init;
		break;
	case 0x2a:
		ctx_init = nv2a_graph_context_init;
		idoffs = 0;
		break;
	case 0x30:
	case 0x31:
		ctx_init = nv30_31_graph_context_init;
		break;
	case 0x34:
		ctx_init = nv34_graph_context_init;
		break;
	case 0x35:
	case 0x36:
		ctx_init = nv35_36_graph_context_init;
		break;
	default:
		BUG_ON(1);
	}

	ret = nouveau_gpuobj_new(dev, chan, pgraph->grctx_size, 16,
				 NVOBJ_FLAG_ZERO_ALLOC, &chan->ramin_grctx);
	if (ret)
		return ret;

	/* Initialise default context values */
	ctx_init(dev, chan->ramin_grctx);

	/* nv20: nv_wo32(dev, chan->ramin_grctx->gpuobj, 10, chan->id<<24); */
	nv_wo32(chan->ramin_grctx, idoffs,
		(chan->id << 24) | 0x1); /* CTX_USER */

	nv_wo32(pgraph->ctx_table, chan->id * 4, chan->ramin_grctx->pinst >> 4);
	return 0;
}

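/*
 * Destroy a channel's PGRAPH context: kick it off the hardware if it is
 * the currently active context, clear the channel's context table entry
 * and drop the reference on the context object.
 */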
void
nv20_graph_destroy_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	pgraph->fifo_access(dev, false);

	/* Unload the context if it's the currently active one */
	if (pgraph->channel(dev) == chan)
		pgraph->unload_context(dev);

	pgraph->fifo_access(dev, true);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the context resources */
	nv_wo32(pgraph->ctx_table, chan->id * 4, 0);
	nouveau_gpuobj_ref(NULL, &chan->ramin_grctx);
}

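/*
 * Load a channel's context into PGRAPH: point CHANNEL_CTX_POINTER at
 * the context object, trigger a LOAD transfer and wait for the engine
 * to go idle.
 */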
int
nv20_graph_load_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	uint32_t inst;

	if (!chan->ramin_grctx)
		return -EINVAL;
	inst = chan->ramin_grctx->pinst >> 4;

	nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_POINTER, inst);
	nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_XFER,
		     NV20_PGRAPH_CHANNEL_CTX_XFER_LOAD);
	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10010100);

	nouveau_wait_for_idle(dev);
	return 0;
}

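/*
 * Save the currently active context back into its context object via a
 * SAVE transfer, then set CTX_USER to the highest channel id, which
 * appears to serve as a "no channel loaded" marker.
 */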
int
nv20_graph_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
	struct nouveau_channel *chan;
	uint32_t inst, tmp;

	chan = pgraph->channel(dev);
	if (!chan)
		return 0;
	inst = chan->ramin_grctx->pinst >> 4;

	nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_POINTER, inst);
	nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_XFER,
		     NV20_PGRAPH_CHANNEL_CTX_XFER_SAVE);

	nouveau_wait_for_idle(dev);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
	tmp  = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (pfifo->channels - 1) << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	return 0;
}

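/*
 * Clear a block of internal PGRAPH state through the RDI indirect
 * register interface.  The index and number of entries differ between
 * NV20 and the later chips; what exactly is being zeroed here is not
 * documented.
 */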
static void
nv20_graph_rdi(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int i, writecount = 32;
	uint32_t rdi_index = 0x2c80000;

	if (dev_priv->chipset == 0x20) {
		rdi_index = 0x3d0000;
		writecount = 15;
	}

	nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, rdi_index);
	for (i = 0; i < writecount; i++)
		nv_wr32(dev, NV10_PGRAPH_RDI_DATA, 0);

	nouveau_wait_for_idle(dev);
}

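/*
 * Program one of PGRAPH's memory tiling regions.  The limit, pitch and
 * address are written both to the regular TLIMIT/TSIZE/TILE registers
 * and, through RDI, to what appears to be an internal copy of the same
 * state.
 */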
void
nv20_graph_set_region_tiling(struct drm_device *dev, int i, uint32_t addr,
			     uint32_t size, uint32_t pitch)
{
	uint32_t limit = max(1u, addr + size) - 1;

	if (pitch)
		addr |= 1;

	nv_wr32(dev, NV20_PGRAPH_TLIMIT(i), limit);
	nv_wr32(dev, NV20_PGRAPH_TSIZE(i), pitch);
	nv_wr32(dev, NV20_PGRAPH_TILE(i), addr);

	nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0030 + 4 * i);
	nv_wr32(dev, NV10_PGRAPH_RDI_DATA, limit);
	nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0050 + 4 * i);
	nv_wr32(dev, NV10_PGRAPH_RDI_DATA, pitch);
	nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0010 + 4 * i);
	nv_wr32(dev, NV10_PGRAPH_RDI_DATA, addr);
}

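/*
 * Bring up PGRAPH on NV20-NV2A: select the context size for the
 * chipset, reset the engine through PMC_ENABLE, allocate the global
 * context pointer table, then program debug/default registers and
 * mirror the PFB framebuffer configuration into PGRAPH.
 */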
int
nv20_graph_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
	uint32_t tmp, vramsz;
	int ret, i;

	switch (dev_priv->chipset) {
	case 0x20:
		pgraph->grctx_size = NV20_GRCTX_SIZE;
		break;
	case 0x25:
	case 0x28:
		pgraph->grctx_size = NV25_GRCTX_SIZE;
		break;
	case 0x2a:
		pgraph->grctx_size = NV2A_GRCTX_SIZE;
		break;
	default:
		NV_ERROR(dev, "unknown chipset, disabling acceleration\n");
		pgraph->accel_blocked = true;
		return 0;
	}

	nv_wr32(dev, NV03_PMC_ENABLE,
		nv_rd32(dev, NV03_PMC_ENABLE) & ~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE,
		nv_rd32(dev, NV03_PMC_ENABLE) |  NV_PMC_ENABLE_PGRAPH);

	if (!pgraph->ctx_table) {
		/* Create Context Pointer Table */
		ret = nouveau_gpuobj_new(dev, NULL, 32 * 4, 16,
					 NVOBJ_FLAG_ZERO_ALLOC,
					 &pgraph->ctx_table);
		if (ret)
			return ret;
	}

	nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_TABLE,
		pgraph->ctx_table->pinst >> 4);

	nv20_graph_rdi(dev);

	nv_wr32(dev, NV03_PGRAPH_INTR   , 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x00118700);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xF3CE0475); /* 0x4 = auto ctx switch */
	nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x00000000);
	nv_wr32(dev, 0x40009C           , 0x00000040);

	if (dev_priv->chipset >= 0x25) {
		nv_wr32(dev, 0x400890, 0x00080000);
		nv_wr32(dev, 0x400610, 0x304B1FB6);
		nv_wr32(dev, 0x400B80, 0x18B82880);
		nv_wr32(dev, 0x400B84, 0x44000000);
		nv_wr32(dev, 0x400098, 0x40000080);
		nv_wr32(dev, 0x400B88, 0x000000ff);
	} else {
		nv_wr32(dev, 0x400880, 0x00080000); /* 0x0008c7df */
		nv_wr32(dev, 0x400094, 0x00000005);
		nv_wr32(dev, 0x400B80, 0x45CAA208); /* 0x45eae20e */
		nv_wr32(dev, 0x400B84, 0x24000000);
		nv_wr32(dev, 0x400098, 0x00000040);
		nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00E00038);
		nv_wr32(dev, NV10_PGRAPH_RDI_DATA , 0x00000030);
		nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00E10038);
		nv_wr32(dev, NV10_PGRAPH_RDI_DATA , 0x00000030);
	}

	/* Turn all the tiling regions off. */
	for (i = 0; i < NV10_PFB_TILE__SIZE; i++)
		nv20_graph_set_region_tiling(dev, i, 0, 0, 0);

	for (i = 0; i < 8; i++) {
		nv_wr32(dev, 0x400980 + i * 4, nv_rd32(dev, 0x100300 + i * 4));
		nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0090 + i * 4);
		nv_wr32(dev, NV10_PGRAPH_RDI_DATA,
			nv_rd32(dev, 0x100300 + i * 4));
	}
	nv_wr32(dev, 0x4009a0, nv_rd32(dev, 0x100324));
	nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA000C);
	nv_wr32(dev, NV10_PGRAPH_RDI_DATA, nv_rd32(dev, 0x100324));

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_wr32(dev, NV10_PGRAPH_STATE      , 0xFFFFFFFF);

	tmp = nv_rd32(dev, NV10_PGRAPH_SURFACE) & 0x0007ff00;
	nv_wr32(dev, NV10_PGRAPH_SURFACE, tmp);
	tmp = nv_rd32(dev, NV10_PGRAPH_SURFACE) | 0x00020100;
	nv_wr32(dev, NV10_PGRAPH_SURFACE, tmp);

	/* begin RAM config */
	vramsz = pci_resource_len(dev->pdev, 0) - 1;
	nv_wr32(dev, 0x4009A4, nv_rd32(dev, NV04_PFB_CFG0));
	nv_wr32(dev, 0x4009A8, nv_rd32(dev, NV04_PFB_CFG1));
	nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0000);
	nv_wr32(dev, NV10_PGRAPH_RDI_DATA , nv_rd32(dev, NV04_PFB_CFG0));
	nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0004);
	nv_wr32(dev, NV10_PGRAPH_RDI_DATA , nv_rd32(dev, NV04_PFB_CFG1));
	nv_wr32(dev, 0x400820, 0);
	nv_wr32(dev, 0x400824, 0);
	nv_wr32(dev, 0x400864, vramsz - 1);
	nv_wr32(dev, 0x400868, vramsz - 1);

	/* interesting.. the below overwrites some of the tile setup above.. */
	nv_wr32(dev, 0x400B20, 0x00000000);
	nv_wr32(dev, 0x400B04, 0xFFFFFFFF);

	nv_wr32(dev, NV03_PGRAPH_ABS_UCLIP_XMIN, 0);
	nv_wr32(dev, NV03_PGRAPH_ABS_UCLIP_YMIN, 0);
	nv_wr32(dev, NV03_PGRAPH_ABS_UCLIP_XMAX, 0x7fff);
	nv_wr32(dev, NV03_PGRAPH_ABS_UCLIP_YMAX, 0x7fff);

	return 0;
}

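/* Drop the global context pointer table reference on engine takedown. */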
void
nv20_graph_takedown(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;

	nouveau_gpuobj_ref(NULL, &pgraph->ctx_table);
}

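/*
 * NV30-family counterpart of nv20_graph_init(): same overall flow
 * (context size selection, engine reset, context table setup), but with
 * NV30-specific debug and RAM configuration values.
 */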
int
nv30_graph_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
	int ret, i;

	switch (dev_priv->chipset) {
	case 0x30:
	case 0x31:
		pgraph->grctx_size = NV30_31_GRCTX_SIZE;
		break;
	case 0x34:
		pgraph->grctx_size = NV34_GRCTX_SIZE;
		break;
	case 0x35:
	case 0x36:
		pgraph->grctx_size = NV35_36_GRCTX_SIZE;
		break;
	default:
		NV_ERROR(dev, "unknown chipset, disabling acceleration\n");
		pgraph->accel_blocked = true;
		return 0;
	}

	nv_wr32(dev, NV03_PMC_ENABLE,
		nv_rd32(dev, NV03_PMC_ENABLE) & ~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE,
		nv_rd32(dev, NV03_PMC_ENABLE) |  NV_PMC_ENABLE_PGRAPH);

	if (!pgraph->ctx_table) {
		/* Create Context Pointer Table */
		ret = nouveau_gpuobj_new(dev, NULL, 32 * 4, 16,
					 NVOBJ_FLAG_ZERO_ALLOC,
					 &pgraph->ctx_table);
		if (ret)
			return ret;
	}

	nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_TABLE,
		pgraph->ctx_table->pinst >> 4);

	nv_wr32(dev, NV03_PGRAPH_INTR   , 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x401287c0);
	nv_wr32(dev, 0x400890, 0x01b463ff);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xf2de0475);
	nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x00008000);
	nv_wr32(dev, NV04_PGRAPH_LIMIT_VIOL_PIX, 0xf04bdff6);
	nv_wr32(dev, 0x400B80, 0x1003d888);
	nv_wr32(dev, 0x400B84, 0x0c000000);
	nv_wr32(dev, 0x400098, 0x00000000);
	nv_wr32(dev, 0x40009C, 0x0005ad00);
	nv_wr32(dev, 0x400B88, 0x62ff00ff); /* suspiciously like PGRAPH_DEBUG_2 */
	nv_wr32(dev, 0x4000a0, 0x00000000);
	nv_wr32(dev, 0x4000a4, 0x00000008);
	nv_wr32(dev, 0x4008a8, 0xb784a400);
	nv_wr32(dev, 0x400ba0, 0x002f8685);
	nv_wr32(dev, 0x400ba4, 0x00231f3f);
	nv_wr32(dev, 0x4008a4, 0x40000020);

	if (dev_priv->chipset == 0x34) {
		nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0004);
		nv_wr32(dev, NV10_PGRAPH_RDI_DATA , 0x00200201);
		nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0008);
		nv_wr32(dev, NV10_PGRAPH_RDI_DATA , 0x00000008);
		nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0000);
		nv_wr32(dev, NV10_PGRAPH_RDI_DATA , 0x00000032);
		nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00E00004);
		nv_wr32(dev, NV10_PGRAPH_RDI_DATA , 0x00000002);
	}

	nv_wr32(dev, 0x4000c0, 0x00000016);

	/* Turn all the tiling regions off. */
	for (i = 0; i < NV10_PFB_TILE__SIZE; i++)
		nv20_graph_set_region_tiling(dev, i, 0, 0, 0);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_wr32(dev, NV10_PGRAPH_STATE      , 0xFFFFFFFF);
	nv_wr32(dev, 0x0040075c             , 0x00000001);

	/* begin RAM config */
	/* vramsz = pci_resource_len(dev->pdev, 0) - 1; */
	nv_wr32(dev, 0x4009A4, nv_rd32(dev, NV04_PFB_CFG0));
	nv_wr32(dev, 0x4009A8, nv_rd32(dev, NV04_PFB_CFG1));
	if (dev_priv->chipset != 0x34) {
		nv_wr32(dev, 0x400750, 0x00EA0000);
		nv_wr32(dev, 0x400754, nv_rd32(dev, NV04_PFB_CFG0));
		nv_wr32(dev, 0x400750, 0x00EA0004);
		nv_wr32(dev, 0x400754, nv_rd32(dev, NV04_PFB_CFG1));
	}

	return 0;
}

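/*
 * Graphics object classes accepted by PGRAPH on these chipsets.  None
 * are software classes and no per-class method handlers are registered.
 */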
struct nouveau_pgraph_object_class nv20_graph_grclass[] = {
	{ 0x0030, false, NULL }, /* null */
	{ 0x0039, false, NULL }, /* m2mf */
	{ 0x004a, false, NULL }, /* gdirect */
	{ 0x009f, false, NULL }, /* imageblit (nv12) */
	{ 0x008a, false, NULL }, /* ifc */
	{ 0x0089, false, NULL }, /* sifm */
	{ 0x0062, false, NULL }, /* surf2d */
	{ 0x0043, false, NULL }, /* rop */
	{ 0x0012, false, NULL }, /* beta1 */
	{ 0x0072, false, NULL }, /* beta4 */
	{ 0x0019, false, NULL }, /* cliprect */
	{ 0x0044, false, NULL }, /* pattern */
	{ 0x009e, false, NULL }, /* swzsurf */
	{ 0x0096, false, NULL }, /* celcius */
	{ 0x0097, false, NULL }, /* kelvin (nv20) */
	{ 0x0597, false, NULL }, /* kelvin (nv25) */
	{}
};

struct nouveau_pgraph_object_class nv30_graph_grclass[] = {
	{ 0x0030, false, NULL }, /* null */
	{ 0x0039, false, NULL }, /* m2mf */
	{ 0x004a, false, NULL }, /* gdirect */
	{ 0x009f, false, NULL }, /* imageblit (nv12) */
	{ 0x008a, false, NULL }, /* ifc */
	{ 0x038a, false, NULL }, /* ifc (nv30) */
	{ 0x0089, false, NULL }, /* sifm */
	{ 0x0389, false, NULL }, /* sifm (nv30) */
	{ 0x0062, false, NULL }, /* surf2d */
	{ 0x0362, false, NULL }, /* surf2d (nv30) */
	{ 0x0043, false, NULL }, /* rop */
	{ 0x0012, false, NULL }, /* beta1 */
	{ 0x0072, false, NULL }, /* beta4 */
	{ 0x0019, false, NULL }, /* cliprect */
	{ 0x0044, false, NULL }, /* pattern */
	{ 0x039e, false, NULL }, /* swzsurf */
	{ 0x0397, false, NULL }, /* rankine (nv30) */
	{ 0x0497, false, NULL }, /* rankine (nv35) */
	{ 0x0697, false, NULL }, /* rankine (nv34) */
	{}
};
801