/*
 * Copyright (C) 2007 Ben Skeggs.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm.h"
#include "nouveau_drv.h"
#include "nouveau_ramht.h"
#include "nouveau_vm.h"

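/*
 * PFIFO (command submission engine) support for NV50/G80-family chips.
 */

/*
 * Rebuild the channel playlist from the software channel list and hand it to
 * PFIFO.  Two buffers are alternated so a new list can be built while the
 * hardware may still be using the previous one; the trailing register writes
 * (0x32f4, 0x32ec, 0x2500) appear to give PFIFO the new list's address and
 * entry count.
 */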
static void
nv50_fifo_playlist_update(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
	struct nouveau_gpuobj *cur;
	int i, nr;

	NV_DEBUG(dev, "\n");

	cur = pfifo->playlist[pfifo->cur_playlist];
	pfifo->cur_playlist = !pfifo->cur_playlist;

	/* We never schedule channel 0 or 127 */
	for (i = 1, nr = 0; i < 127; i++) {
		if (dev_priv->channels.ptr[i] &&
		    dev_priv->channels.ptr[i]->ramfc) {
			nv_wo32(cur, (nr * 4), i);
			nr++;
		}
	}
	dev_priv->engine.instmem.flush(dev);

	nv_wr32(dev, 0x32f4, cur->vinst >> 12);
	nv_wr32(dev, 0x32ec, nr);
	nv_wr32(dev, 0x2500, 0x101);
}

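/*
 * Each channel has an entry in the PFIFO context table pointing at its RAMFC
 * (the per-channel FIFO context).  The original NV50 (chipset 0x50) takes a
 * 4KiB-aligned instance address, later chips (G84+) a 256-byte-aligned one,
 * hence the different shifts below.
 */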
static void
nv50_fifo_channel_enable(struct drm_device *dev, int channel)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = dev_priv->channels.ptr[channel];
	uint32_t inst;

	NV_DEBUG(dev, "ch%d\n", channel);

	if (dev_priv->chipset == 0x50)
		inst = chan->ramfc->vinst >> 12;
	else
		inst = chan->ramfc->vinst >> 8;

	nv_wr32(dev, NV50_PFIFO_CTX_TABLE(channel), inst |
		     NV50_PFIFO_CTX_TABLE_CHANNEL_ENABLED);
}

static void
nv50_fifo_channel_disable(struct drm_device *dev, int channel)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	uint32_t inst;

	NV_DEBUG(dev, "ch%d\n", channel);

	if (dev_priv->chipset == 0x50)
		inst = NV50_PFIFO_CTX_TABLE_INSTANCE_MASK_G80;
	else
		inst = NV50_PFIFO_CTX_TABLE_INSTANCE_MASK_G84;
	nv_wr32(dev, NV50_PFIFO_CTX_TABLE(channel), inst);
}

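/*
 * Reset PFIFO by toggling its bit in the master engine-enable register:
 * clearing the bit holds the engine in reset, setting it brings it back up.
 */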
static void
nv50_fifo_init_reset(struct drm_device *dev)
{
	uint32_t pmc_e = NV_PMC_ENABLE_PFIFO;

	NV_DEBUG(dev, "\n");

	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & ~pmc_e);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |  pmc_e);
}

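/*
 * Hook up the interrupt handler (the NV04 PFIFO ISR is reused on NV50) on
 * IRQ source 8, then acknowledge any pending PFIFO interrupts and unmask
 * them all.
 */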
static void
nv50_fifo_init_intr(struct drm_device *dev)
{
	NV_DEBUG(dev, "\n");

	nouveau_irq_register(dev, 8, nv04_fifo_isr);
	nv_wr32(dev, NV03_PFIFO_INTR_0, 0xFFFFFFFF);
	nv_wr32(dev, NV03_PFIFO_INTR_EN_0, 0xFFFFFFFF);
}

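/*
 * (Re)build the hardware context table: enable the entry for every channel
 * that currently exists in software, disable the rest, then regenerate the
 * playlist to match.
 */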
static void
nv50_fifo_init_context_table(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int i;

	NV_DEBUG(dev, "\n");

	for (i = 0; i < NV50_PFIFO_CTX_TABLE__SIZE; i++) {
		if (dev_priv->channels.ptr[i])
			nv50_fifo_channel_enable(dev, i);
		else
			nv50_fifo_channel_disable(dev, i);
	}

	nv50_fifo_playlist_update(dev);
}

static void
nv50_fifo_init_regs__nv(struct drm_device *dev)
{
	NV_DEBUG(dev, "\n");

	nv_wr32(dev, 0x250c, 0x6f3cfc34);
}

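/*
 * Clear assorted PFIFO state registers and enable channels 0 and 127, the
 * dummy channels set up by nv50_instmem.c; presumably the hardware falls
 * back to these when no real channel is scheduled.
 */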
static void
nv50_fifo_init_regs(struct drm_device *dev)
{
	NV_DEBUG(dev, "\n");

	nv_wr32(dev, 0x2500, 0);
	nv_wr32(dev, 0x3250, 0);
	nv_wr32(dev, 0x3220, 0);
	nv_wr32(dev, 0x3204, 0);
	nv_wr32(dev, 0x3210, 0);
	nv_wr32(dev, 0x3270, 0);

	/* Enable dummy channels setup by nv50_instmem.c */
	nv50_fifo_channel_enable(dev, 0);
	nv50_fifo_channel_enable(dev, 127);
}

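/*
 * Bring PFIFO up.  The two playlist buffers (128 slots of 4 bytes each) are
 * allocated only on first initialization; on re-initialization (e.g. resume)
 * the existing buffers are kept and only the hardware state is reprogrammed.
 */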
int
nv50_fifo_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
	int ret;

	NV_DEBUG(dev, "\n");

	if (pfifo->playlist[0]) {
		pfifo->cur_playlist = !pfifo->cur_playlist;
		goto just_reset;
	}

	ret = nouveau_gpuobj_new(dev, NULL, 128*4, 0x1000,
				 NVOBJ_FLAG_ZERO_ALLOC,
				 &pfifo->playlist[0]);
	if (ret) {
		NV_ERROR(dev, "error creating playlist 0: %d\n", ret);
		return ret;
	}

	ret = nouveau_gpuobj_new(dev, NULL, 128*4, 0x1000,
				 NVOBJ_FLAG_ZERO_ALLOC,
				 &pfifo->playlist[1]);
	if (ret) {
		nouveau_gpuobj_ref(NULL, &pfifo->playlist[0]);
		NV_ERROR(dev, "error creating playlist 1: %d\n", ret);
		return ret;
	}

just_reset:
	nv50_fifo_init_reset(dev);
	nv50_fifo_init_intr(dev);
	nv50_fifo_init_context_table(dev);
	nv50_fifo_init_regs__nv(dev);
	nv50_fifo_init_regs(dev);
	dev_priv->engine.fifo.enable(dev);
	dev_priv->engine.fifo.reassign(dev, true);

	return 0;
}

void
nv50_fifo_takedown(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;

	NV_DEBUG(dev, "\n");

	if (!pfifo->playlist[0])
		return;

	nv_wr32(dev, 0x2140, 0x00000000);
	nouveau_irq_unregister(dev, 8);

	nouveau_gpuobj_ref(NULL, &pfifo->playlist[0]);
	nouveau_gpuobj_ref(NULL, &pfifo->playlist[1]);
}

int
nv50_fifo_channel_id(struct drm_device *dev)
{
	return nv_rd32(dev, NV03_PFIFO_CACHE1_PUSH1) &
			NV50_PFIFO_CACHE1_PUSH1_CHID_MASK;
}

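/*
 * Create the per-channel FIFO state.  On the original NV50 (chipset 0x50) the
 * RAMFC and the CACHE1 save area live at fixed offsets inside the channel's
 * instance memory, so "fake" gpuobjs are wrapped around them; on G84 and
 * later they are allocated as separate objects.  The channel's USER region is
 * then mapped and the initial RAMFC contents (pushbuf, RAMHT, indirect buffer
 * setup) are written before the channel is enabled and added to the playlist.
 */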
int
nv50_fifo_create_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *ramfc = NULL;
	unsigned long flags;
	int ret;

	NV_DEBUG(dev, "ch%d\n", chan->id);

	if (dev_priv->chipset == 0x50) {
		ret = nouveau_gpuobj_new_fake(dev, chan->ramin->pinst,
					      chan->ramin->vinst, 0x100,
					      NVOBJ_FLAG_ZERO_ALLOC |
					      NVOBJ_FLAG_ZERO_FREE,
					      &chan->ramfc);
		if (ret)
			return ret;

		ret = nouveau_gpuobj_new_fake(dev, chan->ramin->pinst + 0x0400,
					      chan->ramin->vinst + 0x0400,
					      4096, 0, &chan->cache);
		if (ret)
			return ret;
	} else {
		ret = nouveau_gpuobj_new(dev, chan, 0x100, 256,
					 NVOBJ_FLAG_ZERO_ALLOC |
					 NVOBJ_FLAG_ZERO_FREE, &chan->ramfc);
		if (ret)
			return ret;

		ret = nouveau_gpuobj_new(dev, chan, 4096, 1024,
					 0, &chan->cache);
		if (ret)
			return ret;
	}
	ramfc = chan->ramfc;

	chan->user = ioremap(pci_resource_start(dev->pdev, 0) +
			     NV50_USER(chan->id), PAGE_SIZE);
	if (!chan->user)
		return -ENOMEM;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);

	nv_wo32(ramfc, 0x48, chan->pushbuf->cinst >> 4);
	nv_wo32(ramfc, 0x80, ((chan->ramht->bits - 9) << 27) |
			     (4 << 24) /* SEARCH_FULL */ |
			     (chan->ramht->gpuobj->cinst >> 4));
	nv_wo32(ramfc, 0x44, 0x2101ffff);
	nv_wo32(ramfc, 0x60, 0x7fffffff);
	nv_wo32(ramfc, 0x40, 0x00000000);
	nv_wo32(ramfc, 0x7c, 0x30000001);
	nv_wo32(ramfc, 0x78, 0x00000000);
	nv_wo32(ramfc, 0x3c, 0x403f6078);
	nv_wo32(ramfc, 0x50, chan->pushbuf_base + chan->dma.ib_base * 4);
	nv_wo32(ramfc, 0x54, drm_order(chan->dma.ib_max + 1) << 16);

	if (dev_priv->chipset != 0x50) {
		nv_wo32(chan->ramin, 0, chan->id);
		nv_wo32(chan->ramin, 4, chan->ramfc->vinst >> 8);

		nv_wo32(ramfc, 0x88, chan->cache->vinst >> 10);
		nv_wo32(ramfc, 0x98, chan->ramin->vinst >> 12);
	}

	dev_priv->engine.instmem.flush(dev);

	nv50_fifo_channel_enable(dev, chan->id);
	nv50_fifo_playlist_update(dev);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
	return 0;
}

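/*
 * Tear down a channel's FIFO state.  PFIFO reassignment is suspended while
 * the channel is unloaded (if currently active) and removed from the context
 * table and playlist; a temporary reference keeps the RAMFC object alive
 * until the hardware no longer refers to it, after which the USER mapping
 * and the remaining objects are released.
 */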
void
nv50_fifo_destroy_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
	struct nouveau_gpuobj *ramfc = NULL;
	unsigned long flags;

	NV_DEBUG(dev, "ch%d\n", chan->id);

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	pfifo->reassign(dev, false);

	/* Unload the context if it's the currently active one */
	if (pfifo->channel_id(dev) == chan->id) {
		pfifo->disable(dev);
		pfifo->unload_context(dev);
		pfifo->enable(dev);
	}

	/* This will ensure the channel is seen as disabled. */
	nouveau_gpuobj_ref(chan->ramfc, &ramfc);
	nouveau_gpuobj_ref(NULL, &chan->ramfc);
	nv50_fifo_channel_disable(dev, chan->id);

	/* Dummy channel, also used on ch 127 */
	if (chan->id == 0)
		nv50_fifo_channel_disable(dev, 127);
	nv50_fifo_playlist_update(dev);

	pfifo->reassign(dev, true);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the channel resources */
	if (chan->user) {
		iounmap(chan->user);
		chan->user = NULL;
	}
	nouveau_gpuobj_ref(NULL, &ramfc);
	nouveau_gpuobj_ref(NULL, &chan->cache);
}

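/*
 * Load a channel's saved context into PFIFO: copy the RAMFC image back into
 * the hardware context registers, replay any methods saved in the channel's
 * cache object into CACHE1, and point CACHE1 at the channel.
 */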
int
nv50_fifo_load_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *ramfc = chan->ramfc;
	struct nouveau_gpuobj *cache = chan->cache;
	int ptr, cnt;

	NV_DEBUG(dev, "ch%d\n", chan->id);

	nv_wr32(dev, 0x3330, nv_ro32(ramfc, 0x00));
	nv_wr32(dev, 0x3334, nv_ro32(ramfc, 0x04));
	nv_wr32(dev, 0x3240, nv_ro32(ramfc, 0x08));
	nv_wr32(dev, 0x3320, nv_ro32(ramfc, 0x0c));
	nv_wr32(dev, 0x3244, nv_ro32(ramfc, 0x10));
	nv_wr32(dev, 0x3328, nv_ro32(ramfc, 0x14));
	nv_wr32(dev, 0x3368, nv_ro32(ramfc, 0x18));
	nv_wr32(dev, 0x336c, nv_ro32(ramfc, 0x1c));
	nv_wr32(dev, 0x3370, nv_ro32(ramfc, 0x20));
	nv_wr32(dev, 0x3374, nv_ro32(ramfc, 0x24));
	nv_wr32(dev, 0x3378, nv_ro32(ramfc, 0x28));
	nv_wr32(dev, 0x337c, nv_ro32(ramfc, 0x2c));
	nv_wr32(dev, 0x3228, nv_ro32(ramfc, 0x30));
	nv_wr32(dev, 0x3364, nv_ro32(ramfc, 0x34));
	nv_wr32(dev, 0x32a0, nv_ro32(ramfc, 0x38));
	nv_wr32(dev, 0x3224, nv_ro32(ramfc, 0x3c));
	nv_wr32(dev, 0x324c, nv_ro32(ramfc, 0x40));
	nv_wr32(dev, 0x2044, nv_ro32(ramfc, 0x44));
	nv_wr32(dev, 0x322c, nv_ro32(ramfc, 0x48));
	nv_wr32(dev, 0x3234, nv_ro32(ramfc, 0x4c));
	nv_wr32(dev, 0x3340, nv_ro32(ramfc, 0x50));
	nv_wr32(dev, 0x3344, nv_ro32(ramfc, 0x54));
	nv_wr32(dev, 0x3280, nv_ro32(ramfc, 0x58));
	nv_wr32(dev, 0x3254, nv_ro32(ramfc, 0x5c));
	nv_wr32(dev, 0x3260, nv_ro32(ramfc, 0x60));
	nv_wr32(dev, 0x3264, nv_ro32(ramfc, 0x64));
	nv_wr32(dev, 0x3268, nv_ro32(ramfc, 0x68));
	nv_wr32(dev, 0x326c, nv_ro32(ramfc, 0x6c));
	nv_wr32(dev, 0x32e4, nv_ro32(ramfc, 0x70));
	nv_wr32(dev, 0x3248, nv_ro32(ramfc, 0x74));
	nv_wr32(dev, 0x2088, nv_ro32(ramfc, 0x78));
	nv_wr32(dev, 0x2058, nv_ro32(ramfc, 0x7c));
	nv_wr32(dev, 0x2210, nv_ro32(ramfc, 0x80));

	cnt = nv_ro32(ramfc, 0x84);
	for (ptr = 0; ptr < cnt; ptr++) {
		nv_wr32(dev, NV40_PFIFO_CACHE1_METHOD(ptr),
			nv_ro32(cache, (ptr * 8) + 0));
		nv_wr32(dev, NV40_PFIFO_CACHE1_DATA(ptr),
			nv_ro32(cache, (ptr * 8) + 4));
	}
	nv_wr32(dev, NV03_PFIFO_CACHE1_PUT, cnt << 2);
	nv_wr32(dev, NV03_PFIFO_CACHE1_GET, 0);

	/* guessing that all the 0x34xx regs aren't on NV50 */
	if (dev_priv->chipset != 0x50) {
		nv_wr32(dev, 0x340c, nv_ro32(ramfc, 0x88));
		nv_wr32(dev, 0x3400, nv_ro32(ramfc, 0x8c));
		nv_wr32(dev, 0x3404, nv_ro32(ramfc, 0x90));
		nv_wr32(dev, 0x3408, nv_ro32(ramfc, 0x94));
		nv_wr32(dev, 0x3410, nv_ro32(ramfc, 0x98));
	}

	nv_wr32(dev, NV03_PFIFO_CACHE1_PUSH1, chan->id | (1<<16));
	return 0;
}

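/*
 * The inverse of nv50_fifo_load_context(): save the PFIFO context registers
 * back into the current channel's RAMFC, drain any pending CACHE1 entries
 * into the channel's cache object, then park PFIFO on dummy channel 127.
 */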
int
nv50_fifo_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
	struct nouveau_gpuobj *ramfc, *cache;
	struct nouveau_channel *chan = NULL;
	int chid, get, put, ptr;

	NV_DEBUG(dev, "\n");

	chid = pfifo->channel_id(dev);
	if (chid < 1 || chid >= dev_priv->engine.fifo.channels - 1)
		return 0;

	chan = dev_priv->channels.ptr[chid];
	if (!chan) {
		NV_ERROR(dev, "Inactive channel on PFIFO: %d\n", chid);
		return -EINVAL;
	}
	NV_DEBUG(dev, "ch%d\n", chan->id);
	ramfc = chan->ramfc;
	cache = chan->cache;

	nv_wo32(ramfc, 0x00, nv_rd32(dev, 0x3330));
	nv_wo32(ramfc, 0x04, nv_rd32(dev, 0x3334));
	nv_wo32(ramfc, 0x08, nv_rd32(dev, 0x3240));
	nv_wo32(ramfc, 0x0c, nv_rd32(dev, 0x3320));
	nv_wo32(ramfc, 0x10, nv_rd32(dev, 0x3244));
	nv_wo32(ramfc, 0x14, nv_rd32(dev, 0x3328));
	nv_wo32(ramfc, 0x18, nv_rd32(dev, 0x3368));
	nv_wo32(ramfc, 0x1c, nv_rd32(dev, 0x336c));
	nv_wo32(ramfc, 0x20, nv_rd32(dev, 0x3370));
	nv_wo32(ramfc, 0x24, nv_rd32(dev, 0x3374));
	nv_wo32(ramfc, 0x28, nv_rd32(dev, 0x3378));
	nv_wo32(ramfc, 0x2c, nv_rd32(dev, 0x337c));
	nv_wo32(ramfc, 0x30, nv_rd32(dev, 0x3228));
	nv_wo32(ramfc, 0x34, nv_rd32(dev, 0x3364));
	nv_wo32(ramfc, 0x38, nv_rd32(dev, 0x32a0));
	nv_wo32(ramfc, 0x3c, nv_rd32(dev, 0x3224));
	nv_wo32(ramfc, 0x40, nv_rd32(dev, 0x324c));
	nv_wo32(ramfc, 0x44, nv_rd32(dev, 0x2044));
	nv_wo32(ramfc, 0x48, nv_rd32(dev, 0x322c));
	nv_wo32(ramfc, 0x4c, nv_rd32(dev, 0x3234));
	nv_wo32(ramfc, 0x50, nv_rd32(dev, 0x3340));
	nv_wo32(ramfc, 0x54, nv_rd32(dev, 0x3344));
	nv_wo32(ramfc, 0x58, nv_rd32(dev, 0x3280));
	nv_wo32(ramfc, 0x5c, nv_rd32(dev, 0x3254));
	nv_wo32(ramfc, 0x60, nv_rd32(dev, 0x3260));
	nv_wo32(ramfc, 0x64, nv_rd32(dev, 0x3264));
	nv_wo32(ramfc, 0x68, nv_rd32(dev, 0x3268));
	nv_wo32(ramfc, 0x6c, nv_rd32(dev, 0x326c));
	nv_wo32(ramfc, 0x70, nv_rd32(dev, 0x32e4));
	nv_wo32(ramfc, 0x74, nv_rd32(dev, 0x3248));
	nv_wo32(ramfc, 0x78, nv_rd32(dev, 0x2088));
	nv_wo32(ramfc, 0x7c, nv_rd32(dev, 0x2058));
	nv_wo32(ramfc, 0x80, nv_rd32(dev, 0x2210));

	put = (nv_rd32(dev, NV03_PFIFO_CACHE1_PUT) & 0x7ff) >> 2;
	get = (nv_rd32(dev, NV03_PFIFO_CACHE1_GET) & 0x7ff) >> 2;
	ptr = 0;
	while (put != get) {
		nv_wo32(cache, ptr + 0,
			nv_rd32(dev, NV40_PFIFO_CACHE1_METHOD(get)));
		nv_wo32(cache, ptr + 4,
			nv_rd32(dev, NV40_PFIFO_CACHE1_DATA(get)));
		get = (get + 1) & 0x1ff;
		ptr += 8;
	}

	/* guessing that all the 0x34xx regs aren't on NV50 */
	if (dev_priv->chipset != 0x50) {
		nv_wo32(ramfc, 0x84, ptr >> 3);
		nv_wo32(ramfc, 0x88, nv_rd32(dev, 0x340c));
		nv_wo32(ramfc, 0x8c, nv_rd32(dev, 0x3400));
		nv_wo32(ramfc, 0x90, nv_rd32(dev, 0x3404));
		nv_wo32(ramfc, 0x94, nv_rd32(dev, 0x3408));
		nv_wo32(ramfc, 0x98, nv_rd32(dev, 0x3410));
	}

	dev_priv->engine.instmem.flush(dev);

	/*XXX: probably reload ch127 (NULL) state back too */
	nv_wr32(dev, NV03_PFIFO_CACHE1_PUSH1, 127);
	return 0;
}

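/*
 * Flush the VM TLB for the PFIFO engine (engine 5 in the nv50 VM flush
 * interface).
 */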
void
nv50_fifo_tlb_flush(struct drm_device *dev)
{
	nv50_vm_flush_engine(dev, 5);
}