#include "drmP.h"
#include "drm.h"
#include "nouveau_drv.h"
#include "nouveau_ramht.h"

/* returns the size of fifo context */
static int
nouveau_fifo_ctx_size(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	if (dev_priv->chipset >= 0x40)
		return 128;
	else if (dev_priv->chipset >= 0x17)
		return 64;

	return 32;
}

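/* Set up the fixed-offset structures at the start of instance memory
 * (shared RAMHT, RAMRO and per-channel RAMFC), then hand whatever space
 * remains to the RAMIN heap for general object allocation.
 */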
int nv04_instmem_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *ramht = NULL;
	u32 offset, length;
	int ret;

	/* RAMIN always available */
	dev_priv->ramin_available = true;

	/* Setup shared RAMHT */
	ret = nouveau_gpuobj_new_fake(dev, 0x10000, ~0, 4096,
				      NVOBJ_FLAG_ZERO_ALLOC, &ramht);
	if (ret)
		return ret;

	ret = nouveau_ramht_new(dev, ramht, &dev_priv->ramht);
	nouveau_gpuobj_ref(NULL, &ramht);
	if (ret)
		return ret;

	/* And RAMRO */
	ret = nouveau_gpuobj_new_fake(dev, 0x11200, ~0, 512,
				      NVOBJ_FLAG_ZERO_ALLOC, &dev_priv->ramro);
	if (ret)
		return ret;

	/* And RAMFC */
	length = dev_priv->engine.fifo.channels * nouveau_fifo_ctx_size(dev);
	switch (dev_priv->card_type) {
	case NV_40:
		offset = 0x20000;
		break;
	default:
		offset = 0x11400;
		break;
	}

	ret = nouveau_gpuobj_new_fake(dev, offset, ~0, length,
				      NVOBJ_FLAG_ZERO_ALLOC, &dev_priv->ramfc);
	if (ret)
		return ret;

	/* Only allow space after RAMFC to be used for object allocation */
	offset += length;

	/* It appears RAMRO (or something?) is controlled by 0x2220/0x2230
	 * on certain NV4x chipsets as well as RAMFC.  When 0x2230 == 0
	 * ("new style" control) the upper 16-bits of 0x2220 points at this
	 * other mysterious table that's clobbering important things.
	 *
	 * We're now pointing this at RAMIN+0x30000 to avoid RAMFC getting
	 * smashed to pieces on us, so reserve 0x30000-0x40000 too..
	 */
	if (dev_priv->card_type >= NV_40) {
		if (offset < 0x40000)
			offset = 0x40000;
	}

	ret = drm_mm_init(&dev_priv->ramin_heap, offset,
			  dev_priv->ramin_rsvd_vram - offset);
	if (ret) {
		NV_ERROR(dev, "Failed to init RAMIN heap: %d\n", ret);
		return ret;
	}

	return 0;
}

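/* Drop the references taken in nv04_instmem_init(). */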
void
nv04_instmem_takedown(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	nouveau_ramht_ref(NULL, &dev_priv->ramht, NULL);
	nouveau_gpuobj_ref(NULL, &dev_priv->ramro);
	nouveau_gpuobj_ref(NULL, &dev_priv->ramfc);
}

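/* No instmem state needs saving or restoring across suspend on this
 * generation, so both hooks are empty.
 */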
int
nv04_instmem_suspend(struct drm_device *dev)
{
	return 0;
}

void
nv04_instmem_resume(struct drm_device *dev)
{
}

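/* Allocate 'size' bytes from the RAMIN heap.  drm_mm_pre_get() fills the
 * heap's pool of spare nodes up front so that the search and the block
 * grab can be done without sleeping while ramin_lock is held; if the
 * atomic grab fails the loop refills the pool and tries again.
 */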
int
nv04_instmem_get(struct nouveau_gpuobj *gpuobj, u32 size, u32 align)
{
	struct drm_nouveau_private *dev_priv = gpuobj->dev->dev_private;
	struct drm_mm_node *ramin = NULL;

	do {
		if (drm_mm_pre_get(&dev_priv->ramin_heap))
			return -ENOMEM;

		spin_lock(&dev_priv->ramin_lock);
		ramin = drm_mm_search_free(&dev_priv->ramin_heap, size, align, 0);
		if (ramin == NULL) {
			spin_unlock(&dev_priv->ramin_lock);
			return -ENOMEM;
		}

		ramin = drm_mm_get_block_atomic(ramin, size, align);
		spin_unlock(&dev_priv->ramin_lock);
	} while (ramin == NULL);

	gpuobj->node  = ramin;
	gpuobj->vinst = ramin->start;
	return 0;
}

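/* Return a block allocated by nv04_instmem_get() to the RAMIN heap. */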
void
nv04_instmem_put(struct nouveau_gpuobj *gpuobj)
{
	struct drm_nouveau_private *dev_priv = gpuobj->dev->dev_private;

	spin_lock(&dev_priv->ramin_lock);
	drm_mm_put_block(gpuobj->node);
	gpuobj->node = NULL;
	spin_unlock(&dev_priv->ramin_lock);
}

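/* Instance memory is addressed directly on these chips, so "mapping" an
 * object just records its PRAMIN offset (pinst), which is identical to
 * its instance address (vinst); nv04_instmem_unmap() has nothing to undo.
 */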
int
nv04_instmem_map(struct nouveau_gpuobj *gpuobj)
{
	gpuobj->pinst = gpuobj->vinst;
	return 0;
}

void
nv04_instmem_unmap(struct nouveau_gpuobj *gpuobj)
{
}

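/* Nothing to flush on this generation; the hook is a no-op. */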
void
nv04_instmem_flush(struct drm_device *dev)
{
}