#include "drmP.h"
#include "nouveau_drv.h"
#include <linux/pagemap.h>
#include <linux/slab.h>

#define NV_CTXDMA_PAGE_SHIFT 12
#define NV_CTXDMA_PAGE_SIZE  (1 << NV_CTXDMA_PAGE_SHIFT)
#define NV_CTXDMA_PAGE_MASK  (NV_CTXDMA_PAGE_SIZE - 1)

struct nouveau_sgdma_be {
	struct ttm_backend backend;
	struct drm_device *dev;

	dma_addr_t *pages;
	unsigned nr_pages;

	u64 offset;
	bool bound;
};

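/*
 * TTM backend used to page system memory in and out of the GPU's GART
 * aperture: populate() DMA-maps the pages handed over by TTM and records
 * their bus addresses, bind()/unbind() program those addresses into the
 * GART, clear() undoes populate(), and destroy() frees the backend.
 */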
static int
nouveau_sgdma_populate(struct ttm_backend *be, unsigned long num_pages,
		       struct page **pages, struct page *dummy_read_page)
{
	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)be;
	struct drm_device *dev = nvbe->dev;

	NV_DEBUG(nvbe->dev, "num_pages = %ld\n", num_pages);

	if (nvbe->pages)
		return -EINVAL;

	nvbe->pages = kmalloc(sizeof(dma_addr_t) * num_pages, GFP_KERNEL);
	if (!nvbe->pages)
		return -ENOMEM;

	nvbe->nr_pages = 0;
	while (num_pages--) {
		nvbe->pages[nvbe->nr_pages] =
			pci_map_page(dev->pdev, pages[nvbe->nr_pages], 0,
				     PAGE_SIZE, PCI_DMA_BIDIRECTIONAL);
		if (pci_dma_mapping_error(dev->pdev,
					  nvbe->pages[nvbe->nr_pages])) {
			be->func->clear(be);
			return -EFAULT;
		}

		nvbe->nr_pages++;
	}

	return 0;
}

static void
nouveau_sgdma_clear(struct ttm_backend *be)
{
	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)be;
	struct drm_device *dev;

	if (nvbe && nvbe->pages) {
		dev = nvbe->dev;
		NV_DEBUG(dev, "\n");

		if (nvbe->bound)
			be->func->unbind(be);

		while (nvbe->nr_pages--) {
			pci_unmap_page(dev->pdev, nvbe->pages[nvbe->nr_pages],
				       PAGE_SIZE, PCI_DMA_BIDIRECTIONAL);
		}
		kfree(nvbe->pages);
		nvbe->pages = NULL;
		nvbe->nr_pages = 0;
	}
}

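/*
 * Pre-NV50 bind: one 32-bit PTE per NV_CTXDMA_PAGE_SIZE (4KiB) page is
 * written into the sg_ctxdma object.  The first two words of that object
 * hold the ctxdma header, so the PTE for GART page n sits at word n + 2.
 * Each PTE holds the page's bus address with the low flag bits set (| 3);
 * nouveau_sgdma_unbind() clears the entries back to zero.  The inner loop
 * splits a CPU page into 4KiB GART pages when PAGE_SIZE is larger than
 * NV_CTXDMA_PAGE_SIZE.
 */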
static int
nouveau_sgdma_bind(struct ttm_backend *be, struct ttm_mem_reg *mem)
{
	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)be;
	struct drm_device *dev = nvbe->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *gpuobj = dev_priv->gart_info.sg_ctxdma;
	unsigned i, j, pte;

	NV_DEBUG(dev, "pg=0x%lx\n", mem->start);

	nvbe->offset = mem->start << PAGE_SHIFT;
	pte = (nvbe->offset >> NV_CTXDMA_PAGE_SHIFT) + 2;
	for (i = 0; i < nvbe->nr_pages; i++) {
		dma_addr_t dma_offset = nvbe->pages[i];

		for (j = 0; j < PAGE_SIZE / NV_CTXDMA_PAGE_SIZE; j++, pte++) {
			/* take the low 32 bits of the running sub-page
			 * address, not of the first sub-page only */
			nv_wo32(gpuobj, (pte * 4) + 0,
				lower_32_bits(dma_offset) | 3);
			dma_offset += NV_CTXDMA_PAGE_SIZE;
		}
	}

	nvbe->bound = true;
	return 0;
}

static int
nouveau_sgdma_unbind(struct ttm_backend *be)
{
	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)be;
	struct drm_device *dev = nvbe->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *gpuobj = dev_priv->gart_info.sg_ctxdma;
	unsigned i, j, pte;

	NV_DEBUG(dev, "\n");

	if (!nvbe->bound)
		return 0;

	pte = (nvbe->offset >> NV_CTXDMA_PAGE_SHIFT) + 2;
	for (i = 0; i < nvbe->nr_pages; i++) {
		for (j = 0; j < PAGE_SIZE / NV_CTXDMA_PAGE_SIZE; j++, pte++)
			nv_wo32(gpuobj, (pte * 4) + 0, 0x00000000);
	}

	nvbe->bound = false;
	return 0;
}

static void
nouveau_sgdma_destroy(struct ttm_backend *be)
{
	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)be;

	if (be) {
		NV_DEBUG(nvbe->dev, "\n");

		if (nvbe) {
			if (nvbe->pages)
				be->func->clear(be);
			kfree(nvbe);
		}
	}
}

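/*
 * NV50+ has no ctxdma page table.  The pages are instead mapped into the
 * GART VMA reserved from the channel VM in nouveau_sgdma_init(), using
 * nouveau_vm_map_sg()/nouveau_vm_unmap_at().
 */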
static int
nv50_sgdma_bind(struct ttm_backend *be, struct ttm_mem_reg *mem)
{
	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)be;
	struct drm_nouveau_private *dev_priv = nvbe->dev->dev_private;

	nvbe->offset = mem->start << PAGE_SHIFT;

	nouveau_vm_map_sg(&dev_priv->gart_info.vma, nvbe->offset,
			  nvbe->nr_pages << PAGE_SHIFT, nvbe->pages);
	nvbe->bound = true;
	return 0;
}

static int
nv50_sgdma_unbind(struct ttm_backend *be)
{
	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)be;
	struct drm_nouveau_private *dev_priv = nvbe->dev->dev_private;

	if (!nvbe->bound)
		return 0;

	nouveau_vm_unmap_at(&dev_priv->gart_info.vma, nvbe->offset,
			    nvbe->nr_pages << PAGE_SHIFT);
	nvbe->bound = false;
	return 0;
}

static struct ttm_backend_func nouveau_sgdma_backend = {
	.populate = nouveau_sgdma_populate,
	.clear = nouveau_sgdma_clear,
	.bind = nouveau_sgdma_bind,
	.unbind = nouveau_sgdma_unbind,
	.destroy = nouveau_sgdma_destroy
};

static struct ttm_backend_func nv50_sgdma_backend = {
	.populate = nouveau_sgdma_populate,
	.clear = nouveau_sgdma_clear,
	.bind = nv50_sgdma_bind,
	.unbind = nv50_sgdma_unbind,
	.destroy = nouveau_sgdma_destroy
};

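/*
 * Allocate a backend instance and pick the vtable that matches the
 * chipset: ctxdma-based for pre-NV50 cards, VM-based for NV50 and later.
 */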
struct ttm_backend *
nouveau_sgdma_init_ttm(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_sgdma_be *nvbe;

	nvbe = kzalloc(sizeof(*nvbe), GFP_KERNEL);
	if (!nvbe)
		return NULL;

	nvbe->dev = dev;

	if (dev_priv->card_type < NV_50)
		nvbe->backend.func = &nouveau_sgdma_backend;
	else
		nvbe->backend.func = &nv50_sgdma_backend;
	return &nvbe->backend;
}

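/*
 * One-time GART setup.  Pre-NV50: allocate the sg_ctxdma object, sized for
 * a 64MiB aperture (512MiB when at least 2MiB of reserved instance memory
 * is available), write the DMA-in-memory header and zero every PTE.
 * NV50+: reserve a 512MiB VMA in the channel VM for nouveau_vm_map_sg().
 */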
int
nouveau_sgdma_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *gpuobj = NULL;
	uint32_t aper_size, obj_size;
	int i, ret;

	if (dev_priv->card_type < NV_50) {
		if (dev_priv->ramin_rsvd_vram < 2 * 1024 * 1024)
			aper_size = 64 * 1024 * 1024;
		else
			aper_size = 512 * 1024 * 1024;

		obj_size  = (aper_size >> NV_CTXDMA_PAGE_SHIFT) * 4;
		obj_size += 8; /* ctxdma header */

		ret = nouveau_gpuobj_new(dev, NULL, obj_size, 16,
					 NVOBJ_FLAG_ZERO_ALLOC |
					 NVOBJ_FLAG_ZERO_FREE, &gpuobj);
		if (ret) {
			NV_ERROR(dev, "Error creating sgdma object: %d\n", ret);
			return ret;
		}

		nv_wo32(gpuobj, 0, NV_CLASS_DMA_IN_MEMORY |
				   (1 << 12) /* PT present */ |
				   (0 << 13) /* PT *not* linear */ |
				   (0 << 14) /* RW */ |
				   (2 << 16) /* PCI */);
		nv_wo32(gpuobj, 4, aper_size - 1);
		for (i = 2; i < 2 + (aper_size >> 12); i++)
			nv_wo32(gpuobj, i * 4, 0x00000000);

		dev_priv->gart_info.sg_ctxdma = gpuobj;
		dev_priv->gart_info.aper_base = 0;
		dev_priv->gart_info.aper_size = aper_size;
	} else
	if (dev_priv->chan_vm) {
		ret = nouveau_vm_get(dev_priv->chan_vm, 512 * 1024 * 1024,
				     12, NV_MEM_ACCESS_RW,
				     &dev_priv->gart_info.vma);
		if (ret)
			return ret;

		dev_priv->gart_info.aper_base = dev_priv->gart_info.vma.offset;
		dev_priv->gart_info.aper_size = 512 * 1024 * 1024;
	}

	dev_priv->gart_info.type = NOUVEAU_GART_SGDMA;
	return 0;
}

void
nouveau_sgdma_takedown(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	nouveau_gpuobj_ref(NULL, &dev_priv->gart_info.sg_ctxdma);
	nouveau_vm_put(&dev_priv->gart_info.vma);
}

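/*
 * Translate a GART offset back into the bus address programmed into the
 * ctxdma: read the PTE of the containing 4KiB page, mask off the flag
 * bits and add the offset within the page.  Only valid on pre-NV50 cards,
 * hence the BUG_ON().
 */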
uint32_t
nouveau_sgdma_get_physical(struct drm_device *dev, uint32_t offset)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *gpuobj = dev_priv->gart_info.sg_ctxdma;
	int pte = (offset >> NV_CTXDMA_PAGE_SHIFT) + 2;

	BUG_ON(dev_priv->card_type >= NV_50);

	return (nv_ro32(gpuobj, 4 * pte) & ~NV_CTXDMA_PAGE_MASK) |
		(offset & NV_CTXDMA_PAGE_MASK);
}