/*
 * Copyright (C) 2012 Ben Skeggs.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm.h"
#include "nouveau_drv.h"
#include "nouveau_fifo.h"
#include "nouveau_ramht.h"
#include "nouveau_vm.h"

struct nv50_fifo_priv {
	struct nouveau_fifo_priv base;
	struct nouveau_gpuobj *playlist[2];
	int cur_playlist;
};

struct nv50_fifo_chan {
	struct nouveau_fifo_chan base;
};

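/* Rebuild the PFIFO playlist: scan the per-channel context slots at
 * 0x002600, collect every channel whose enable bit is set into the
 * currently inactive playlist buffer, then point PFIFO at the new list.
 * Double-buffering ensures the list PFIFO is reading is never modified
 * in place.
 */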
void
nv50_fifo_playlist_update(struct drm_device *dev)
{
	struct nv50_fifo_priv *priv = nv_engine(dev, NVOBJ_ENGINE_FIFO);
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *cur;
	int i, p;

	cur = priv->playlist[priv->cur_playlist];
	priv->cur_playlist = !priv->cur_playlist;

	for (i = 0, p = 0; i < priv->base.channels; i++) {
		if (nv_rd32(dev, 0x002600 + (i * 4)) & 0x80000000)
			nv_wo32(cur, p++ * 4, i);
	}

	dev_priv->engine.instmem.flush(dev);

	nv_wr32(dev, 0x0032f4, cur->vinst >> 12);
	nv_wr32(dev, 0x0032ec, p);
	nv_wr32(dev, 0x002500, 0x00000101);
}

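/* Channel creation: allocate the per-channel state, map the channel's
 * USER control area, write its FIFO context (pushbuf object, indirect
 * buffer address/size, RAMHT location) into instance memory, then bind
 * the channel and add it to the playlist.
 */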
static int
nv50_fifo_context_new(struct nouveau_channel *chan, int engine)
{
	struct nv50_fifo_priv *priv = nv_engine(chan->dev, engine);
	struct nv50_fifo_chan *fctx;
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	u64 ib_offset = chan->pushbuf_base + chan->dma.ib_base * 4;
	u64 instance = chan->ramin->vinst >> 12;
	unsigned long flags;
	int ret = 0, i;

	fctx = chan->engctx[engine] = kzalloc(sizeof(*fctx), GFP_KERNEL);
	if (!fctx)
		return -ENOMEM;
	atomic_inc(&chan->vm->engref[engine]);

	chan->user = ioremap(pci_resource_start(dev->pdev, 0) +
			     NV50_USER(chan->id), PAGE_SIZE);
	if (!chan->user) {
		ret = -ENOMEM;
		goto error;
	}

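	/* clear the context area, then fill in the pushbuf object,
	 * indirect buffer address/size and RAMHT parameters; the
	 * remaining writes are fixed initial values for this area
	 */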
	for (i = 0; i < 0x100; i += 4)
		nv_wo32(chan->ramin, i, 0x00000000);
	nv_wo32(chan->ramin, 0x3c, 0x403f6078);
	nv_wo32(chan->ramin, 0x40, 0x00000000);
	nv_wo32(chan->ramin, 0x44, 0x01003fff);
	nv_wo32(chan->ramin, 0x48, chan->pushbuf->cinst >> 4);
	nv_wo32(chan->ramin, 0x50, lower_32_bits(ib_offset));
	nv_wo32(chan->ramin, 0x54, upper_32_bits(ib_offset) |
				   drm_order(chan->dma.ib_max + 1) << 16);
	nv_wo32(chan->ramin, 0x60, 0x7fffffff);
	nv_wo32(chan->ramin, 0x78, 0x00000000);
	nv_wo32(chan->ramin, 0x7c, 0x30000001);
	nv_wo32(chan->ramin, 0x80, ((chan->ramht->bits - 9) << 27) |
				   (4 << 24) /* SEARCH_FULL */ |
				   (chan->ramht->gpuobj->cinst >> 4));

	dev_priv->engine.instmem.flush(dev);

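	/* bind the channel's instance to its PFIFO slot and add it to
	 * the playlist, with the context switch lock held
	 */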
	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv_wr32(dev, 0x002600 + (chan->id * 4), 0x80000000 | instance);
	nv50_fifo_playlist_update(dev);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

error:
	if (ret)
		priv->base.base.context_del(chan, engine);
	return ret;
}

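/* Ask PFIFO to unload the channel's context and wait for the connected
 * engines to acknowledge the request; returns false on timeout.
 */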
static bool
nv50_fifo_kickoff(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	bool done = true;
	u32 me;

	/* HW bug workaround:
	 *
	 * PFIFO will hang forever if the connected engines don't report
	 * that they've processed the context switch request.
	 *
	 * For the kickoff to work, we need to ensure all the connected
	 * engines are in a state where they can answer.
	 *
	 * Newer chipsets don't seem to suffer from this issue, and there
	 * is also an "ignore these engines" bitmask register we can use
	 * if we hit the problem there.
	 */

	/* PME: make sure engine is enabled */
	me = nv_mask(dev, 0x00b860, 0x00000001, 0x00000001);

	/* do the kickoff... */
	nv_wr32(dev, 0x0032fc, chan->ramin->vinst >> 12);
	if (!nv_wait_ne(dev, 0x0032fc, 0xffffffff, 0xffffffff)) {
		NV_INFO(dev, "PFIFO: channel %d unload timeout\n", chan->id);
		done = false;
	}

	/* restore any engine states we changed, and exit */
	nv_wr32(dev, 0x00b860, me);
	return done;
}

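/* Channel teardown: drop the channel from the playlist, force a context
 * unload, then release its mapping and per-channel state.
 */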
static void
nv50_fifo_context_del(struct nouveau_channel *chan, int engine)
{
	struct nv50_fifo_chan *fctx = chan->engctx[engine];
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	unsigned long flags;

	/* remove channel from playlist, will context switch if active */
	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv_mask(dev, 0x002600 + (chan->id * 4), 0x80000000, 0x00000000);
	nv50_fifo_playlist_update(dev);

	/* tell any engines on this channel to unload their contexts */
	nv50_fifo_kickoff(chan);

	nv_wr32(dev, 0x002600 + (chan->id * 4), 0x00000000);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* clean up */
	if (chan->user) {
		iounmap(chan->user);
		chan->user = NULL;
	}

	atomic_dec(&chan->vm->engref[engine]);
	chan->engctx[engine] = NULL;
	kfree(fctx);
}

static int
nv50_fifo_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	u32 instance;
	int i;

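	/* reset PFIFO by toggling its bit in the PMC enable register,
	 * then program default configuration values
	 */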
	nv_mask(dev, 0x000200, 0x00000100, 0x00000000);
	nv_mask(dev, 0x000200, 0x00000100, 0x00000100);
	nv_wr32(dev, 0x00250c, 0x6f3cfc34);
	nv_wr32(dev, 0x002044, 0x01003fff);

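	/* acknowledge any pending interrupts and unmask them all */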
	nv_wr32(dev, 0x002100, 0xffffffff);
	nv_wr32(dev, 0x002140, 0xffffffff);

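	/* restore the context pointers for any channels that already exist */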
	for (i = 0; i < 128; i++) {
		struct nouveau_channel *chan = dev_priv->channels.ptr[i];
		if (chan && chan->engctx[engine])
			instance = 0x80000000 | chan->ramin->vinst >> 12;
		else
			instance = 0x00000000;
		nv_wr32(dev, 0x002600 + (i * 4), instance);
	}

	nv50_fifo_playlist_update(dev);

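	/* re-enable cache push/pull and the PFIFO itself */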
	nv_wr32(dev, 0x003200, 1);
	nv_wr32(dev, 0x003250, 1);
	nv_wr32(dev, 0x002500, 1);
	return 0;
}

static int
nv50_fifo_fini(struct drm_device *dev, int engine, bool suspend)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv50_fifo_priv *priv = nv_engine(dev, engine);
	int i;

	/* set playlist length to zero, fifo will unload context */
	nv_wr32(dev, 0x0032ec, 0);

	/* tell all connected engines to unload their contexts */
	for (i = 0; i < priv->base.channels; i++) {
		struct nouveau_channel *chan = dev_priv->channels.ptr[i];
		if (chan && !nv50_fifo_kickoff(chan))
			return -EBUSY;
	}

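	/* mask off all PFIFO interrupts */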
	nv_wr32(dev, 0x002140, 0);
	return 0;
}

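/* Flush the VM TLB for the PFIFO engine (engine 5 in the nv50 VM flush
 * interface).
 */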
void
nv50_fifo_tlb_flush(struct drm_device *dev, int engine)
{
	nv50_vm_flush_engine(dev, 5);
}

void
nv50_fifo_destroy(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv50_fifo_priv *priv = nv_engine(dev, engine);

	nouveau_irq_unregister(dev, 8);

	nouveau_gpuobj_ref(NULL, &priv->playlist[0]);
	nouveau_gpuobj_ref(NULL, &priv->playlist[1]);

	dev_priv->eng[engine] = NULL;
	kfree(priv);
}

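/* Engine constructor: register the nv50 PFIFO method table, allocate
 * both playlist buffers and hook up the PFIFO interrupt handler.
 */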
int
nv50_fifo_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv50_fifo_priv *priv;
	int ret;

	priv = kzalloc(sizeof(*priv), GFP_KERNEL);
	if (!priv)
		return -ENOMEM;

	priv->base.base.destroy = nv50_fifo_destroy;
	priv->base.base.init = nv50_fifo_init;
	priv->base.base.fini = nv50_fifo_fini;
	priv->base.base.context_new = nv50_fifo_context_new;
	priv->base.base.context_del = nv50_fifo_context_del;
	priv->base.base.tlb_flush = nv50_fifo_tlb_flush;
	priv->base.channels = 127;
	dev_priv->eng[NVOBJ_ENGINE_FIFO] = &priv->base.base;

	ret = nouveau_gpuobj_new(dev, NULL, priv->base.channels * 4, 0x1000,
				 NVOBJ_FLAG_ZERO_ALLOC, &priv->playlist[0]);
	if (ret)
		goto error;

	ret = nouveau_gpuobj_new(dev, NULL, priv->base.channels * 4, 0x1000,
				 NVOBJ_FLAG_ZERO_ALLOC, &priv->playlist[1]);
	if (ret)
		goto error;

	nouveau_irq_register(dev, 8, nv04_fifo_isr);
error:
	if (ret)
		priv->base.base.destroy(dev, NVOBJ_ENGINE_FIFO);
	return ret;
}