/*
 * Core driver for the Synopsys DesignWare DMA Controller
 *
 * Copyright (C) 2007-2008 Atmel Corporation
 * Copyright (C) 2010-2011 ST Microelectronics
 * Copyright (C) 2013 Intel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/pm_runtime.h>

#include "../dmaengine.h"
#include "internal.h"

/*
 * This supports the Synopsys "DesignWare AHB Central DMA Controller",
 * (DW_ahb_dmac) which is used with various AMBA 2.0 systems (not all
 * of which use ARM any more). See the "Databook" from Synopsys for
 * information beyond what licensees probably provide.
 *
 * The driver has been tested with the Atmel AT32AP7000, which does not
 * support descriptor writeback.
 */

#define DWC_DEFAULT_CTLLO(_chan) ({					\
		struct dw_dma_chan *_dwc = to_dw_dma_chan(_chan);	\
		struct dma_slave_config *_sconfig = &_dwc->dma_sconfig;	\
		bool _is_slave = is_slave_direction(_dwc->direction);	\
		u8 _smsize = _is_slave ? _sconfig->src_maxburst :	\
			DW_DMA_MSIZE_16;				\
		u8 _dmsize = _is_slave ? _sconfig->dst_maxburst :	\
			DW_DMA_MSIZE_16;				\
		u8 _dms = (_dwc->direction == DMA_MEM_TO_DEV) ?		\
			_dwc->p_master : _dwc->m_master;		\
		u8 _sms = (_dwc->direction == DMA_DEV_TO_MEM) ?		\
			_dwc->p_master : _dwc->m_master;		\
									\
		(DWC_CTLL_DST_MSIZE(_dmsize)				\
		 | DWC_CTLL_SRC_MSIZE(_smsize)				\
		 | DWC_CTLL_LLP_D_EN					\
		 | DWC_CTLL_LLP_S_EN					\
		 | DWC_CTLL_DMS(_dms)					\
		 | DWC_CTLL_SMS(_sms));					\
	})
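
/*
 * Illustrative note, not taken from the databook: for a slave channel
 * the burst sizes above come from the channel's dma_slave_config,
 * while a plain memcpy channel falls back to DW_DMA_MSIZE_16 on both
 * sides.  For DMA_MEM_TO_DEV the destination is routed through the
 * peripheral master (_dms = p_master) and the source through the
 * memory master; DMA_DEV_TO_MEM mirrors that assignment.
 */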

/*
 * Number of descriptors to allocate for each channel. This should be
 * made configurable somehow; preferably, the clients (at least the
 * ones using slave transfers) should be able to give us a hint.
 */
#define NR_DESCS_PER_CHANNEL	64

/* The set of bus widths supported by the DMA controller */
#define DW_DMA_BUSWIDTHS			\
	BIT(DMA_SLAVE_BUSWIDTH_UNDEFINED) |	\
	BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |	\
	BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |	\
	BIT(DMA_SLAVE_BUSWIDTH_4_BYTES)

/*----------------------------------------------------------------------*/

static struct device *chan2dev(struct dma_chan *chan)
{
	return &chan->dev->device;
}

static struct dw_desc *dwc_first_active(struct dw_dma_chan *dwc)
{
	return to_dw_desc(dwc->active_list.next);
}

static struct dw_desc *dwc_desc_get(struct dw_dma_chan *dwc)
{
	struct dw_desc *desc, *_desc;
	struct dw_desc *ret = NULL;
	unsigned int i = 0;
	unsigned long flags;

	spin_lock_irqsave(&dwc->lock, flags);
	list_for_each_entry_safe(desc, _desc, &dwc->free_list, desc_node) {
		i++;
		if (async_tx_test_ack(&desc->txd)) {
			list_del(&desc->desc_node);
			ret = desc;
			break;
		}
		dev_dbg(chan2dev(&dwc->chan), "desc %p not ACKed\n", desc);
	}
	spin_unlock_irqrestore(&dwc->lock, flags);

	dev_vdbg(chan2dev(&dwc->chan), "scanned %u descriptors on freelist\n", i);

	return ret;
}

/*
 * Move a descriptor, including any children, to the free list.
 * `desc' must not be on any lists.
 */
static void dwc_desc_put(struct dw_dma_chan *dwc, struct dw_desc *desc)
{
	unsigned long flags;

	if (desc) {
		struct dw_desc *child;

		spin_lock_irqsave(&dwc->lock, flags);
		list_for_each_entry(child, &desc->tx_list, desc_node)
			dev_vdbg(chan2dev(&dwc->chan),
				 "moving child desc %p to freelist\n",
				 child);
		list_splice_init(&desc->tx_list, &dwc->free_list);
		dev_vdbg(chan2dev(&dwc->chan), "moving desc %p to freelist\n", desc);
		list_add(&desc->desc_node, &dwc->free_list);
		spin_unlock_irqrestore(&dwc->lock, flags);
	}
}

static void dwc_initialize(struct dw_dma_chan *dwc)
{
	struct dw_dma *dw = to_dw_dma(dwc->chan.device);
	u32 cfghi = DWC_CFGH_FIFO_MODE;
	u32 cfglo = DWC_CFGL_CH_PRIOR(dwc->priority);

	if (test_bit(DW_DMA_IS_INITIALIZED, &dwc->flags))
		return;

	cfghi |= DWC_CFGH_DST_PER(dwc->dst_id);
	cfghi |= DWC_CFGH_SRC_PER(dwc->src_id);

	channel_writel(dwc, CFG_LO, cfglo);
	channel_writel(dwc, CFG_HI, cfghi);

	/* Enable interrupts */
	channel_set_bit(dw, MASK.XFER, dwc->mask);
	channel_set_bit(dw, MASK.ERROR, dwc->mask);

	set_bit(DW_DMA_IS_INITIALIZED, &dwc->flags);
}

/*----------------------------------------------------------------------*/

static inline unsigned int dwc_fast_ffs(unsigned long long v)
{
	/*
	 * We can be a lot more clever here, but this should take care
	 * of the most common optimization.
	 */
	if (!(v & 7))
		return 3;
	else if (!(v & 3))
		return 2;
	else if (!(v & 1))
		return 1;
	return 0;
}
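
/*
 * Worked example (illustrative): dwc_fast_ffs(src | dest | len) yields
 * the largest transfer-width exponent that all three values are
 * aligned to, capped at 3 (64 bit).  With src = 0x1000, dest = 0x2004
 * and len = 0x40 the OR is 0x3044: not a multiple of 8
 * (0x3044 & 7 == 4) but a multiple of 4 (0x3044 & 3 == 0), so the
 * result is 2, i.e. 32-bit transfers.
 */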

static inline void dwc_dump_chan_regs(struct dw_dma_chan *dwc)
{
	dev_err(chan2dev(&dwc->chan),
		"  SAR: 0x%x DAR: 0x%x LLP: 0x%x CTL: 0x%x:%08x\n",
		channel_readl(dwc, SAR),
		channel_readl(dwc, DAR),
		channel_readl(dwc, LLP),
		channel_readl(dwc, CTL_HI),
		channel_readl(dwc, CTL_LO));
}

static inline void dwc_chan_disable(struct dw_dma *dw, struct dw_dma_chan *dwc)
{
	channel_clear_bit(dw, CH_EN, dwc->mask);
	while (dma_readl(dw, CH_EN) & dwc->mask)
		cpu_relax();
}

/*----------------------------------------------------------------------*/

/* Perform single block transfer */
static inline void dwc_do_single_block(struct dw_dma_chan *dwc,
				       struct dw_desc *desc)
{
	struct dw_dma *dw = to_dw_dma(dwc->chan.device);
	u32 ctllo;

	/*
	 * Software emulation of LLP mode relies on interrupts to
	 * continue multi-block transfers.
	 */
	ctllo = lli_read(desc, ctllo) | DWC_CTLL_INT_EN;

	channel_writel(dwc, SAR, lli_read(desc, sar));
	channel_writel(dwc, DAR, lli_read(desc, dar));
	channel_writel(dwc, CTL_LO, ctllo);
	channel_writel(dwc, CTL_HI, lli_read(desc, ctlhi));
	channel_set_bit(dw, CH_EN, dwc->mask);

	/* Move pointer to next descriptor */
	dwc->tx_node_active = dwc->tx_node_active->next;
}
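
/*
 * Rough flow of software-emulated LLP mode (a sketch, assuming the
 * channel has no hardware multi-block support): dwc_dostart() programs
 * the first block through the helper above, the XFER interrupt fires
 * when that block finishes, and dwc_scan_descriptors() feeds the next
 * node from tx_node_active back into dwc_do_single_block() until the
 * descriptor list is exhausted.
 */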

/* Called with dwc->lock held and bh disabled */
static void dwc_dostart(struct dw_dma_chan *dwc, struct dw_desc *first)
{
	struct dw_dma *dw = to_dw_dma(dwc->chan.device);
	u8 lms = DWC_LLP_LMS(dwc->m_master);
	unsigned long was_soft_llp;

	/* ASSERT: channel is idle */
	if (dma_readl(dw, CH_EN) & dwc->mask) {
		dev_err(chan2dev(&dwc->chan),
			"%s: BUG: Attempted to start non-idle channel\n",
			__func__);
		dwc_dump_chan_regs(dwc);

		/* The tasklet will hopefully advance the queue... */
		return;
	}

	if (dwc->nollp) {
		was_soft_llp = test_and_set_bit(DW_DMA_IS_SOFT_LLP,
						&dwc->flags);
		if (was_soft_llp) {
			dev_err(chan2dev(&dwc->chan),
				"BUG: Attempted to start new LLP transfer inside ongoing one\n");
			return;
		}

		dwc_initialize(dwc);

		dwc->residue = first->total_len;
		dwc->tx_node_active = &first->tx_list;

		/* Submit first block */
		dwc_do_single_block(dwc, first);

		return;
	}

	dwc_initialize(dwc);

	channel_writel(dwc, LLP, first->txd.phys | lms);
	channel_writel(dwc, CTL_LO, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN);
	channel_writel(dwc, CTL_HI, 0);
	channel_set_bit(dw, CH_EN, dwc->mask);
}

static void dwc_dostart_first_queued(struct dw_dma_chan *dwc)
{
	struct dw_desc *desc;

	if (list_empty(&dwc->queue))
		return;

	list_move(dwc->queue.next, &dwc->active_list);
	desc = dwc_first_active(dwc);
	dev_vdbg(chan2dev(&dwc->chan), "%s: started %u\n", __func__, desc->txd.cookie);
	dwc_dostart(dwc, desc);
}

/*----------------------------------------------------------------------*/

static void
dwc_descriptor_complete(struct dw_dma_chan *dwc, struct dw_desc *desc,
		bool callback_required)
{
	dma_async_tx_callback callback = NULL;
	void *param = NULL;
	struct dma_async_tx_descriptor *txd = &desc->txd;
	struct dw_desc *child;
	unsigned long flags;

	dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie);

	spin_lock_irqsave(&dwc->lock, flags);
	dma_cookie_complete(txd);
	if (callback_required) {
		callback = txd->callback;
		param = txd->callback_param;
	}

	/* async_tx_ack */
	list_for_each_entry(child, &desc->tx_list, desc_node)
		async_tx_ack(&child->txd);
	async_tx_ack(&desc->txd);

	list_splice_init(&desc->tx_list, &dwc->free_list);
	list_move(&desc->desc_node, &dwc->free_list);

	dma_descriptor_unmap(txd);
	spin_unlock_irqrestore(&dwc->lock, flags);

	if (callback)
		callback(param);
}

static void dwc_complete_all(struct dw_dma *dw, struct dw_dma_chan *dwc)
{
	struct dw_desc *desc, *_desc;
	LIST_HEAD(list);
	unsigned long flags;

	spin_lock_irqsave(&dwc->lock, flags);
	if (dma_readl(dw, CH_EN) & dwc->mask) {
		dev_err(chan2dev(&dwc->chan),
			"BUG: XFER bit set, but channel not idle!\n");

		/* Try to continue after resetting the channel... */
		dwc_chan_disable(dw, dwc);
	}

	/*
	 * Submit queued descriptors ASAP, i.e. before we go through
	 * the completed ones.
	 */
	list_splice_init(&dwc->active_list, &list);
	dwc_dostart_first_queued(dwc);

	spin_unlock_irqrestore(&dwc->lock, flags);

	list_for_each_entry_safe(desc, _desc, &list, desc_node)
		dwc_descriptor_complete(dwc, desc, true);
}

/* Returns how many bytes were already received from source */
static inline u32 dwc_get_sent(struct dw_dma_chan *dwc)
{
	u32 ctlhi = channel_readl(dwc, CTL_HI);
	u32 ctllo = channel_readl(dwc, CTL_LO);

	return (ctlhi & DWC_CTLH_BLOCK_TS_MASK) * (1 << (ctllo >> 4 & 7));
}
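
/*
 * Worked example (illustrative): CTL_HI.BLOCK_TS counts completed
 * source-width items and bits 6:4 of CTL_LO hold the source transfer
 * width encoding.  With BLOCK_TS = 16 and a 32-bit source width
 * (encoding 2), the channel has sent 16 * (1 << 2) = 64 bytes.
 */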

static void dwc_scan_descriptors(struct dw_dma *dw, struct dw_dma_chan *dwc)
{
	dma_addr_t llp;
	struct dw_desc *desc, *_desc;
	struct dw_desc *child;
	u32 status_xfer;
	unsigned long flags;

	spin_lock_irqsave(&dwc->lock, flags);
	llp = channel_readl(dwc, LLP);
	status_xfer = dma_readl(dw, RAW.XFER);

	if (status_xfer & dwc->mask) {
		/* Everything we've submitted is done */
		dma_writel(dw, CLEAR.XFER, dwc->mask);

		if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) {
			struct list_head *head, *active = dwc->tx_node_active;

			/*
			 * We are inside first active descriptor.
			 * Otherwise something is really wrong.
			 */
			desc = dwc_first_active(dwc);

			head = &desc->tx_list;
			if (active != head) {
				/* Update desc to reflect last sent one */
				if (active != head->next)
					desc = to_dw_desc(active->prev);

				dwc->residue -= desc->len;

				child = to_dw_desc(active);

				/* Submit next block */
				dwc_do_single_block(dwc, child);

				spin_unlock_irqrestore(&dwc->lock, flags);
				return;
			}

			/* We are done here */
			clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags);
		}

		dwc->residue = 0;

		spin_unlock_irqrestore(&dwc->lock, flags);

		dwc_complete_all(dw, dwc);
		return;
	}

	if (list_empty(&dwc->active_list)) {
		dwc->residue = 0;
		spin_unlock_irqrestore(&dwc->lock, flags);
		return;
	}

	if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags)) {
		dev_vdbg(chan2dev(&dwc->chan), "%s: soft LLP mode\n", __func__);
		spin_unlock_irqrestore(&dwc->lock, flags);
		return;
	}

	dev_vdbg(chan2dev(&dwc->chan), "%s: llp=%pad\n", __func__, &llp);

	list_for_each_entry_safe(desc, _desc, &dwc->active_list, desc_node) {
		/* Initial residue value */
		dwc->residue = desc->total_len;

		/* Check first descriptor's addr */
		if (desc->txd.phys == DWC_LLP_LOC(llp)) {
			spin_unlock_irqrestore(&dwc->lock, flags);
			return;
		}

		/* Check first descriptor's llp */
		if (lli_read(desc, llp) == llp) {
			/* This one is currently in progress */
			dwc->residue -= dwc_get_sent(dwc);
			spin_unlock_irqrestore(&dwc->lock, flags);
			return;
		}

		dwc->residue -= desc->len;
		list_for_each_entry(child, &desc->tx_list, desc_node) {
			if (lli_read(child, llp) == llp) {
				/* Currently in progress */
				dwc->residue -= dwc_get_sent(dwc);
				spin_unlock_irqrestore(&dwc->lock, flags);
				return;
			}
			dwc->residue -= child->len;
		}

		/*
		 * No descriptors so far seem to be in progress, i.e.
		 * this one must be done.
		 */
		spin_unlock_irqrestore(&dwc->lock, flags);
		dwc_descriptor_complete(dwc, desc, true);
		spin_lock_irqsave(&dwc->lock, flags);
	}

	dev_err(chan2dev(&dwc->chan),
		"BUG: All descriptors done, but channel not idle!\n");

	/* Try to continue after resetting the channel... */
	dwc_chan_disable(dw, dwc);

	dwc_dostart_first_queued(dwc);
	spin_unlock_irqrestore(&dwc->lock, flags);
}

static inline void dwc_dump_lli(struct dw_dma_chan *dwc, struct dw_desc *desc)
{
	dev_crit(chan2dev(&dwc->chan), "  desc: s0x%x d0x%x l0x%x c0x%x:%x\n",
		 lli_read(desc, sar),
		 lli_read(desc, dar),
		 lli_read(desc, llp),
		 lli_read(desc, ctlhi),
		 lli_read(desc, ctllo));
}

static void dwc_handle_error(struct dw_dma *dw, struct dw_dma_chan *dwc)
{
	struct dw_desc *bad_desc;
	struct dw_desc *child;
	unsigned long flags;

	dwc_scan_descriptors(dw, dwc);

	spin_lock_irqsave(&dwc->lock, flags);

	/*
	 * The descriptor currently at the head of the active list is
	 * borked. Since we don't have any way to report errors, we'll
	 * just have to scream loudly and try to carry on.
	 */
	bad_desc = dwc_first_active(dwc);
	list_del_init(&bad_desc->desc_node);
	list_move(dwc->queue.next, dwc->active_list.prev);

	/* Clear the error flag and try to restart the controller */
	dma_writel(dw, CLEAR.ERROR, dwc->mask);
	if (!list_empty(&dwc->active_list))
		dwc_dostart(dwc, dwc_first_active(dwc));

	/*
	 * WARN may seem harsh, but since this only happens
	 * when someone submits a bad physical address in a
	 * descriptor, we should consider ourselves lucky that the
	 * controller flagged an error instead of scribbling over
	 * random memory locations.
	 */
	dev_WARN(chan2dev(&dwc->chan), "Bad descriptor submitted for DMA!\n"
		 "  cookie: %d\n", bad_desc->txd.cookie);
	dwc_dump_lli(dwc, bad_desc);
	list_for_each_entry(child, &bad_desc->tx_list, desc_node)
		dwc_dump_lli(dwc, child);

	spin_unlock_irqrestore(&dwc->lock, flags);

	/* Pretend the descriptor completed successfully */
	dwc_descriptor_complete(dwc, bad_desc, true);
}

/* --------------------- Cyclic DMA API extensions -------------------- */

dma_addr_t dw_dma_get_src_addr(struct dma_chan *chan)
{
	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
	return channel_readl(dwc, SAR);
}
EXPORT_SYMBOL(dw_dma_get_src_addr);

dma_addr_t dw_dma_get_dst_addr(struct dma_chan *chan)
{
	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
	return channel_readl(dwc, DAR);
}
EXPORT_SYMBOL(dw_dma_get_dst_addr);
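
/*
 * Hypothetical caller sketch (not part of this driver): a cyclic
 * client such as an audio driver could derive the current hardware
 * position from these helpers, e.g. for a MEM_TO_DEV ring buffer:
 *
 *	dma_addr_t pos = dw_dma_get_src_addr(chan);
 *	size_t consumed = pos - buf_phys;
 *
 * where buf_phys is assumed to be the DMA address of the buffer.
 */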

/* Called with dwc->lock held and all DMAC interrupts disabled */
static void dwc_handle_cyclic(struct dw_dma *dw, struct dw_dma_chan *dwc,
		u32 status_block, u32 status_err, u32 status_xfer)
{
	unsigned long flags;

	if (status_block & dwc->mask) {
		void (*callback)(void *param);
		void *callback_param;

		dev_vdbg(chan2dev(&dwc->chan), "new cyclic period llp 0x%08x\n",
			 channel_readl(dwc, LLP));
		dma_writel(dw, CLEAR.BLOCK, dwc->mask);

		callback = dwc->cdesc->period_callback;
		callback_param = dwc->cdesc->period_callback_param;

		if (callback)
			callback(callback_param);
	}

	/*
	 * Error and transfer complete are highly unlikely, and will most
	 * likely be due to a configuration error by the user.
	 */
	if (unlikely(status_err & dwc->mask) ||
	    unlikely(status_xfer & dwc->mask)) {
		unsigned int i;

		dev_err(chan2dev(&dwc->chan),
			"cyclic DMA unexpected %s interrupt, stopping DMA transfer\n",
			status_xfer ? "xfer" : "error");

		spin_lock_irqsave(&dwc->lock, flags);

		dwc_dump_chan_regs(dwc);

		dwc_chan_disable(dw, dwc);

		/* Make sure DMA does not restart by loading a new list */
		channel_writel(dwc, LLP, 0);
		channel_writel(dwc, CTL_LO, 0);
		channel_writel(dwc, CTL_HI, 0);

		dma_writel(dw, CLEAR.BLOCK, dwc->mask);
		dma_writel(dw, CLEAR.ERROR, dwc->mask);
		dma_writel(dw, CLEAR.XFER, dwc->mask);

		for (i = 0; i < dwc->cdesc->periods; i++)
			dwc_dump_lli(dwc, dwc->cdesc->desc[i]);

		spin_unlock_irqrestore(&dwc->lock, flags);
	}

	/* Re-enable interrupts */
	channel_set_bit(dw, MASK.BLOCK, dwc->mask);
}

/* ------------------------------------------------------------------------- */

static void dw_dma_tasklet(unsigned long data)
{
	struct dw_dma *dw = (struct dw_dma *)data;
	struct dw_dma_chan *dwc;
	u32 status_block;
	u32 status_xfer;
	u32 status_err;
	unsigned int i;

	status_block = dma_readl(dw, RAW.BLOCK);
	status_xfer = dma_readl(dw, RAW.XFER);
	status_err = dma_readl(dw, RAW.ERROR);

	dev_vdbg(dw->dma.dev, "%s: status_err=%x\n", __func__, status_err);

	for (i = 0; i < dw->dma.chancnt; i++) {
		dwc = &dw->chan[i];
		if (test_bit(DW_DMA_IS_CYCLIC, &dwc->flags))
			dwc_handle_cyclic(dw, dwc, status_block, status_err,
					  status_xfer);
		else if (status_err & (1 << i))
			dwc_handle_error(dw, dwc);
		else if (status_xfer & (1 << i))
			dwc_scan_descriptors(dw, dwc);
	}

	/* Re-enable interrupts */
	channel_set_bit(dw, MASK.XFER, dw->all_chan_mask);
	channel_set_bit(dw, MASK.ERROR, dw->all_chan_mask);
}

static irqreturn_t dw_dma_interrupt(int irq, void *dev_id)
{
	struct dw_dma *dw = dev_id;
	u32 status;

	/* Check if we have any interrupt from the DMAC which is not in use */
	if (!dw->in_use)
		return IRQ_NONE;

	status = dma_readl(dw, STATUS_INT);
	dev_vdbg(dw->dma.dev, "%s: status=0x%x\n", __func__, status);

	/* Check if we have any interrupt from the DMAC */
	if (!status)
		return IRQ_NONE;

	/*
	 * Just disable the interrupts. We'll turn them back on in the
	 * softirq handler.
	 */
	channel_clear_bit(dw, MASK.XFER, dw->all_chan_mask);
	channel_clear_bit(dw, MASK.BLOCK, dw->all_chan_mask);
	channel_clear_bit(dw, MASK.ERROR, dw->all_chan_mask);

	status = dma_readl(dw, STATUS_INT);
	if (status) {
		dev_err(dw->dma.dev,
			"BUG: Unexpected interrupts pending: 0x%x\n",
			status);

		/* Try to recover */
		channel_clear_bit(dw, MASK.XFER, (1 << 8) - 1);
		channel_clear_bit(dw, MASK.BLOCK, (1 << 8) - 1);
		channel_clear_bit(dw, MASK.SRC_TRAN, (1 << 8) - 1);
		channel_clear_bit(dw, MASK.DST_TRAN, (1 << 8) - 1);
		channel_clear_bit(dw, MASK.ERROR, (1 << 8) - 1);
	}

	tasklet_schedule(&dw->tasklet);

	return IRQ_HANDLED;
}

/*----------------------------------------------------------------------*/

static dma_cookie_t dwc_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct dw_desc *desc = txd_to_dw_desc(tx);
	struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan);
	dma_cookie_t cookie;
	unsigned long flags;

	spin_lock_irqsave(&dwc->lock, flags);
	cookie = dma_cookie_assign(tx);

	/*
	 * REVISIT: We should attempt to chain as many descriptors as
	 * possible, perhaps even appending to those already submitted
	 * for DMA. But this is hard to do in a race-free manner.
	 */

	dev_vdbg(chan2dev(tx->chan), "%s: queued %u\n", __func__, desc->txd.cookie);
	list_add_tail(&desc->desc_node, &dwc->queue);

	spin_unlock_irqrestore(&dwc->lock, flags);

	return cookie;
}

static struct dma_async_tx_descriptor *
dwc_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
		size_t len, unsigned long flags)
{
	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
	struct dw_dma *dw = to_dw_dma(chan->device);
	struct dw_desc *desc;
	struct dw_desc *first;
	struct dw_desc *prev;
	size_t xfer_count;
	size_t offset;
	unsigned int src_width;
	unsigned int dst_width;
	unsigned int data_width;
	u32 ctllo;
	u8 lms = DWC_LLP_LMS(dwc->m_master);

	dev_vdbg(chan2dev(chan),
		 "%s: d%pad s%pad l0x%zx f0x%lx\n", __func__,
		 &dest, &src, len, flags);

	if (unlikely(!len)) {
		dev_dbg(chan2dev(chan), "%s: length is zero!\n", __func__);
		return NULL;
	}

	dwc->direction = DMA_MEM_TO_MEM;

	data_width = dw->data_width[dwc->m_master];

	src_width = dst_width = min_t(unsigned int, data_width,
				      dwc_fast_ffs(src | dest | len));

	ctllo = DWC_DEFAULT_CTLLO(chan)
			| DWC_CTLL_DST_WIDTH(dst_width)
			| DWC_CTLL_SRC_WIDTH(src_width)
			| DWC_CTLL_DST_INC
			| DWC_CTLL_SRC_INC
			| DWC_CTLL_FC_M2M;
	prev = first = NULL;

	for (offset = 0; offset < len; offset += xfer_count << src_width) {
		xfer_count = min_t(size_t, (len - offset) >> src_width,
				   dwc->block_size);

		desc = dwc_desc_get(dwc);
		if (!desc)
			goto err_desc_get;

		lli_write(desc, sar, src + offset);
		lli_write(desc, dar, dest + offset);
		lli_write(desc, ctllo, ctllo);
		lli_write(desc, ctlhi, xfer_count);
		desc->len = xfer_count << src_width;

		if (!first) {
			first = desc;
		} else {
			lli_write(prev, llp, desc->txd.phys | lms);
			list_add_tail(&desc->desc_node, &first->tx_list);
		}
		prev = desc;
	}

	if (flags & DMA_PREP_INTERRUPT)
		/* Trigger interrupt after last block */
		lli_set(prev, ctllo, DWC_CTLL_INT_EN);

	prev->lli.llp = 0;
	lli_clear(prev, ctllo, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN);
	first->txd.flags = flags;
	first->total_len = len;

	return &first->txd;

err_desc_get:
	dwc_desc_put(dwc, first);
	return NULL;
}
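
/*
 * Typical dmaengine usage of the memcpy preparation above (a sketch
 * using only generic dmaengine calls; done_fn is a placeholder):
 *
 *	struct dma_async_tx_descriptor *txd;
 *
 *	txd = dmaengine_prep_dma_memcpy(chan, dest, src, len,
 *					DMA_PREP_INTERRUPT);
 *	if (txd) {
 *		txd->callback = done_fn;
 *		dmaengine_submit(txd);
 *		dma_async_issue_pending(chan);
 *	}
 */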

static struct dma_async_tx_descriptor *
dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
		unsigned int sg_len, enum dma_transfer_direction direction,
		unsigned long flags, void *context)
{
	struct dw_dma_chan *dwc = to_dw_dma_chan(chan);
	struct dw_dma *dw = to_dw_dma(chan->device);
	struct dma_slave_config *sconfig = &dwc->dma_sconfig;
	struct dw_desc *prev;
	struct dw_desc *first;
	u32 ctllo;
	u8 lms = DWC_LLP_LMS(dwc->m_master);
	dma_addr_t reg;
	unsigned int reg_width;
	unsigned int mem_width;
	unsigned int data_width;
	unsigned int i;
	struct scatterlist *sg;
	size_t total_len = 0;

	dev_vdbg(chan2dev(chan), "%s\n", __func__);

	if (unlikely(!is_slave_direction(direction) || !sg_len))
		return NULL;

	dwc->direction = direction;

	prev = first = NULL;

	switch (direction) {
	case DMA_MEM_TO_DEV:
		reg_width = __ffs(sconfig->dst_addr_width);
		reg = sconfig->dst_addr;
		ctllo = (DWC_DEFAULT_CTLLO(chan)
				| DWC_CTLL_DST_WIDTH(reg_width)
				| DWC_CTLL_DST_FIX
				| DWC_CTLL_SRC_INC);

		ctllo |= sconfig->device_fc ? DWC_CTLL_FC(DW_DMA_FC_P_M2P) :
			DWC_CTLL_FC(DW_DMA_FC_D_M2P);

		data_width = dw->data_width[dwc->m_master];

		for_each_sg(sgl, sg, sg_len, i) {
			struct dw_desc *desc;
			u32 len, dlen, mem;

			mem = sg_dma_address(sg);
			len = sg_dma_len(sg);

			mem_width = min_t(unsigned int,
					  data_width, dwc_fast_ffs(mem | len));

slave_sg_todev_fill_desc:
			desc = dwc_desc_get(dwc);
			if (!desc)
				goto err_desc_get;

			lli_write(desc, sar, mem);
			lli_write(desc, dar, reg);
			lli_write(desc, ctllo, ctllo | DWC_CTLL_SRC_WIDTH(mem_width));
			if ((len >> mem_width) > dwc->block_size) {
				dlen = dwc->block_size << mem_width;
				mem += dlen;
				len -= dlen;
			} else {
				dlen = len;
				len = 0;
			}

			lli_write(desc, ctlhi, dlen >> mem_width);
			desc->len = dlen;

			if (!first) {
				first = desc;
			} else {
				lli_write(prev, llp, desc->txd.phys | lms);
				list_add_tail(&desc->desc_node, &first->tx_list);
			}
			prev = desc;
			total_len += dlen;

			if (len)
				goto slave_sg_todev_fill_desc;
		}
		break;
	case DMA_DEV_TO_MEM:
		reg_width = __ffs(sconfig->src_addr_width);
		reg = sconfig->src_addr;
		ctllo = (DWC_DEFAULT_CTLLO(chan)
				| DWC_CTLL_SRC_WIDTH(reg_width)
				| DWC_CTLL_DST_INC
				| DWC_CTLL_SRC_FIX);

		ctllo |= sconfig->device_fc ? DWC_CTLL_FC(DW_DMA_FC_P_P2M) :
			DWC_CTLL_FC(DW_DMA_FC_D_P2M);

		data_width = dw->data_width[dwc->m_master];

		for_each_sg(sgl, sg, sg_len, i) {
			struct dw_desc *desc;
			u32 len, dlen, mem;

			mem = sg_dma_address(sg);
			len = sg_dma_len(sg);

			mem_width = min_t(unsigned int,
					  data_width, dwc_fast_ffs(mem | len));
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 881 | |
Viresh Kumar | 69dc14b | 2011-04-18 14:54:56 +0530 | [diff] [blame] | 882 | slave_sg_fromdev_fill_desc: |
| 883 | desc = dwc_desc_get(dwc); |
Jarkko Nikula | b260722 | 2015-03-10 11:37:24 +0200 | [diff] [blame] | 884 | if (!desc) |
Viresh Kumar | 69dc14b | 2011-04-18 14:54:56 +0530 | [diff] [blame] | 885 | goto err_desc_get; |
Viresh Kumar | 69dc14b | 2011-04-18 14:54:56 +0530 | [diff] [blame] | 886 | |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 887 | lli_write(desc, sar, reg); |
| 888 | lli_write(desc, dar, mem); |
| 889 | lli_write(desc, ctllo, ctllo | DWC_CTLL_DST_WIDTH(mem_width)); |
Andy Shevchenko | 4a63a8b | 2012-09-21 15:05:47 +0300 | [diff] [blame] | 890 | if ((len >> reg_width) > dwc->block_size) { |
| 891 | dlen = dwc->block_size << reg_width; |
Viresh Kumar | 69dc14b | 2011-04-18 14:54:56 +0530 | [diff] [blame] | 892 | mem += dlen; |
| 893 | len -= dlen; |
| 894 | } else { |
| 895 | dlen = len; |
| 896 | len = 0; |
| 897 | } |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 898 | lli_write(desc, ctlhi, dlen >> reg_width); |
Andy Shevchenko | 176dcec | 2013-01-25 11:48:02 +0200 | [diff] [blame] | 899 | desc->len = dlen; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 900 | |
| 901 | if (!first) { |
| 902 | first = desc; |
| 903 | } else { |
Mans Rullgard | 2a0fae0 | 2016-03-18 16:24:44 +0200 | [diff] [blame] | 904 | lli_write(prev, llp, desc->txd.phys | lms); |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 905 | list_add_tail(&desc->desc_node, &first->tx_list); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 906 | } |
| 907 | prev = desc; |
Viresh Kumar | 69dc14b | 2011-04-18 14:54:56 +0530 | [diff] [blame] | 908 | total_len += dlen; |
| 909 | |
| 910 | if (len) |
| 911 | goto slave_sg_fromdev_fill_desc; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 912 | } |
| 913 | break; |
| 914 | default: |
| 915 | return NULL; |
| 916 | } |
| 917 | |
| 918 | if (flags & DMA_PREP_INTERRUPT) |
| 919 | /* Trigger interrupt after last block */ |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 920 | lli_set(prev, ctllo, DWC_CTLL_INT_EN); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 921 | |
 | 922 | 	lli_write(prev, llp, 0); |
Mans Rullgard | a3e5579 | 2016-03-18 16:24:45 +0200 | [diff] [blame] | 923 | lli_clear(prev, ctllo, DWC_CTLL_LLP_D_EN | DWC_CTLL_LLP_S_EN); |
Andy Shevchenko | 30d38a3 | 2013-01-25 11:48:01 +0200 | [diff] [blame] | 924 | first->total_len = total_len; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 925 | |
| 926 | return &first->txd; |
| 927 | |
| 928 | err_desc_get: |
Jarkko Nikula | b260722 | 2015-03-10 11:37:24 +0200 | [diff] [blame] | 929 | dev_err(chan2dev(chan), |
| 930 | "not enough descriptors available. Direction %d\n", direction); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 931 | dwc_desc_put(dwc, first); |
| 932 | return NULL; |
| 933 | } |
| 934 | |
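/*
 * A minimal sketch of how the prep routine above is reached from a client
 * driver through the generic dmaengine API.  The channel and scatterlist
 * are assumed to be set up already; names here are illustrative only and
 * not part of this driver.
 */
static dma_cookie_t example_submit_slave_sg(struct dma_chan *chan,
					    struct scatterlist *sgl,
					    unsigned int sg_len)
{
	struct dma_async_tx_descriptor *txd;

	txd = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
				      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txd)
		return -EBUSY;

	/* Queues the descriptor; nothing runs until issue_pending */
	return dmaengine_submit(txd);
}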
Andy Shevchenko | 4d130de | 2014-08-19 20:29:16 +0300 | [diff] [blame] | 935 | bool dw_dma_filter(struct dma_chan *chan, void *param) |
| 936 | { |
| 937 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 938 | struct dw_dma_slave *dws = param; |
| 939 | |
Andy Shevchenko | 3fe6409 | 2016-04-08 16:22:17 +0300 | [diff] [blame] | 940 | if (dws->dma_dev != chan->device->dev) |
Andy Shevchenko | 4d130de | 2014-08-19 20:29:16 +0300 | [diff] [blame] | 941 | return false; |
| 942 | |
| 943 | /* We have to copy data since dws can be temporary storage */ |
| 944 | |
| 945 | dwc->src_id = dws->src_id; |
| 946 | dwc->dst_id = dws->dst_id; |
| 947 | |
Andy Shevchenko | c422025 | 2016-03-18 16:24:41 +0200 | [diff] [blame] | 948 | dwc->m_master = dws->m_master; |
| 949 | dwc->p_master = dws->p_master; |
Andy Shevchenko | 4d130de | 2014-08-19 20:29:16 +0300 | [diff] [blame] | 950 | |
| 951 | return true; |
| 952 | } |
| 953 | EXPORT_SYMBOL_GPL(dw_dma_filter); |
| 954 | |
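/*
 * A minimal sketch of requesting a channel with dw_dma_filter().  The
 * request line and master numbers below are made-up example values; real
 * users take them from platform data or ACPI/DT glue.
 */
static struct dma_chan *example_request_chan(struct device *dma_dev)
{
	struct dw_dma_slave slave = {
		.dma_dev  = dma_dev,
		.src_id   = 0,
		.dst_id   = 1,
		.m_master = 0,
		.p_master = 1,
	};
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* dw_dma_filter() copies these values into the matched channel */
	return dma_request_channel(mask, dw_dma_filter, &slave);
}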
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 955 | /* |
| 956 | * Fix sconfig's burst size according to dw_dmac. We need to convert them as: |
| 957 | * 1 -> 0, 4 -> 1, 8 -> 2, 16 -> 3. |
| 958 | * |
 | 959 |  * NOTE: burst size 2 is not supported by the controller. |
 | 960 |  * |
 | 961 |  * This is done by finding the position of the most significant set bit: fls(n) - 2. |
| 962 | */ |
| 963 | static inline void convert_burst(u32 *maxburst) |
| 964 | { |
| 965 | if (*maxburst > 1) |
| 966 | *maxburst = fls(*maxburst) - 2; |
| 967 | else |
| 968 | *maxburst = 0; |
| 969 | } |
| 970 | |
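/*
 * Worked examples for convert_burst(), matching the table above.  fls(n)
 * returns the 1-based position of the most significant set bit, so:
 *
 *   maxburst  1 -> 0               (else branch)
 *   maxburst  4 -> fls(4)  - 2 = 1
 *   maxburst  8 -> fls(8)  - 2 = 2
 *   maxburst 16 -> fls(16) - 2 = 3
 */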
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 971 | static int dwc_config(struct dma_chan *chan, struct dma_slave_config *sconfig) |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 972 | { |
| 973 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 974 | |
Andy Shevchenko | 495aea4 | 2013-01-10 11:11:41 +0200 | [diff] [blame] | 975 | /* Check if chan will be configured for slave transfers */ |
| 976 | if (!is_slave_direction(sconfig->direction)) |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 977 | return -EINVAL; |
| 978 | |
| 979 | memcpy(&dwc->dma_sconfig, sconfig, sizeof(*sconfig)); |
Andy Shevchenko | 0fdb567 | 2013-01-10 10:53:03 +0200 | [diff] [blame] | 980 | dwc->direction = sconfig->direction; |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 981 | |
| 982 | convert_burst(&dwc->dma_sconfig.src_maxburst); |
| 983 | convert_burst(&dwc->dma_sconfig.dst_maxburst); |
| 984 | |
| 985 | return 0; |
| 986 | } |
| 987 | |
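/*
 * A sketch of the slave configuration a client would hand to dwc_config()
 * through dmaengine_slave_config().  The FIFO address and bus width are
 * hypothetical values for a memory-to-peripheral stream.
 */
static int example_config_chan(struct dma_chan *chan, dma_addr_t fifo_addr)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_addr,
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst	= 8,	/* becomes 2 after convert_burst() */
	};

	return dmaengine_slave_config(chan, &cfg);
}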
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 988 | static int dwc_pause(struct dma_chan *chan) |
Andy Shevchenko | 21fe3c5 | 2013-01-09 10:17:14 +0200 | [diff] [blame] | 989 | { |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 990 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 991 | unsigned long flags; |
| 992 | unsigned int count = 20; /* timeout iterations */ |
| 993 | u32 cfglo; |
Andy Shevchenko | 21fe3c5 | 2013-01-09 10:17:14 +0200 | [diff] [blame] | 994 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 995 | spin_lock_irqsave(&dwc->lock, flags); |
| 996 | |
| 997 | cfglo = channel_readl(dwc, CFG_LO); |
Andy Shevchenko | 21fe3c5 | 2013-01-09 10:17:14 +0200 | [diff] [blame] | 998 | channel_writel(dwc, CFG_LO, cfglo | DWC_CFGL_CH_SUSP); |
Andy Shevchenko | 123b69a | 2013-03-21 11:49:17 +0200 | [diff] [blame] | 999 | while (!(channel_readl(dwc, CFG_LO) & DWC_CFGL_FIFO_EMPTY) && count--) |
| 1000 | udelay(2); |
Andy Shevchenko | 21fe3c5 | 2013-01-09 10:17:14 +0200 | [diff] [blame] | 1001 | |
Andy Shevchenko | 5e09f98 | 2016-03-18 16:24:51 +0200 | [diff] [blame] | 1002 | set_bit(DW_DMA_IS_PAUSED, &dwc->flags); |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1003 | |
| 1004 | spin_unlock_irqrestore(&dwc->lock, flags); |
| 1005 | |
| 1006 | return 0; |
Andy Shevchenko | 21fe3c5 | 2013-01-09 10:17:14 +0200 | [diff] [blame] | 1007 | } |
| 1008 | |
| 1009 | static inline void dwc_chan_resume(struct dw_dma_chan *dwc) |
| 1010 | { |
| 1011 | u32 cfglo = channel_readl(dwc, CFG_LO); |
| 1012 | |
| 1013 | channel_writel(dwc, CFG_LO, cfglo & ~DWC_CFGL_CH_SUSP); |
| 1014 | |
Andy Shevchenko | 5e09f98 | 2016-03-18 16:24:51 +0200 | [diff] [blame] | 1015 | clear_bit(DW_DMA_IS_PAUSED, &dwc->flags); |
Andy Shevchenko | 21fe3c5 | 2013-01-09 10:17:14 +0200 | [diff] [blame] | 1016 | } |
| 1017 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1018 | static int dwc_resume(struct dma_chan *chan) |
| 1019 | { |
| 1020 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 1021 | unsigned long flags; |
| 1022 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1023 | spin_lock_irqsave(&dwc->lock, flags); |
| 1024 | |
Andy Shevchenko | 5e09f98 | 2016-03-18 16:24:51 +0200 | [diff] [blame] | 1025 | if (test_bit(DW_DMA_IS_PAUSED, &dwc->flags)) |
| 1026 | dwc_chan_resume(dwc); |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1027 | |
| 1028 | spin_unlock_irqrestore(&dwc->lock, flags); |
| 1029 | |
| 1030 | return 0; |
| 1031 | } |
| 1032 | |
| 1033 | static int dwc_terminate_all(struct dma_chan *chan) |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1034 | { |
| 1035 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 1036 | struct dw_dma *dw = to_dw_dma(chan->device); |
| 1037 | struct dw_desc *desc, *_desc; |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1038 | unsigned long flags; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1039 | LIST_HEAD(list); |
| 1040 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1041 | spin_lock_irqsave(&dwc->lock, flags); |
Linus Walleij | a7c57cf | 2011-04-19 08:31:32 +0800 | [diff] [blame] | 1042 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1043 | clear_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags); |
Linus Walleij | a7c57cf | 2011-04-19 08:31:32 +0800 | [diff] [blame] | 1044 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1045 | dwc_chan_disable(dw, dwc); |
Linus Walleij | a7c57cf | 2011-04-19 08:31:32 +0800 | [diff] [blame] | 1046 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1047 | dwc_chan_resume(dwc); |
Linus Walleij | a7c57cf | 2011-04-19 08:31:32 +0800 | [diff] [blame] | 1048 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1049 | /* active_list entries will end up before queued entries */ |
| 1050 | list_splice_init(&dwc->queue, &list); |
| 1051 | list_splice_init(&dwc->active_list, &list); |
Linus Walleij | a7c57cf | 2011-04-19 08:31:32 +0800 | [diff] [blame] | 1052 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1053 | spin_unlock_irqrestore(&dwc->lock, flags); |
Linus Walleij | a7c57cf | 2011-04-19 08:31:32 +0800 | [diff] [blame] | 1054 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1055 | /* Flush all pending and queued descriptors */ |
| 1056 | list_for_each_entry_safe(desc, _desc, &list, desc_node) |
| 1057 | dwc_descriptor_complete(dwc, desc, false); |
Linus Walleij | c3635c7 | 2010-03-26 16:44:01 -0700 | [diff] [blame] | 1058 | |
Linus Walleij | c3635c7 | 2010-03-26 16:44:01 -0700 | [diff] [blame] | 1059 | return 0; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1060 | } |
| 1061 | |
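/*
 * The three callbacks above are wired into dw->dma.device_{pause,resume,
 * terminate_all} in dw_dma_probe() and reached through the standard
 * dmaengine wrappers; a sketch of the client-side calls:
 */
static void example_pause_resume_stop(struct dma_chan *chan)
{
	dmaengine_pause(chan);		/* -> dwc_pause(), waits for the FIFO to drain */
	dmaengine_resume(chan);		/* -> dwc_resume() */

	/* Abort and flush everything queued on the channel */
	dmaengine_terminate_all(chan);	/* -> dwc_terminate_all() */
}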
Andy Shevchenko | 4702d52 | 2013-01-25 11:48:03 +0200 | [diff] [blame] | 1062 | static inline u32 dwc_get_residue(struct dw_dma_chan *dwc) |
| 1063 | { |
| 1064 | unsigned long flags; |
| 1065 | u32 residue; |
| 1066 | |
| 1067 | spin_lock_irqsave(&dwc->lock, flags); |
| 1068 | |
| 1069 | residue = dwc->residue; |
| 1070 | if (test_bit(DW_DMA_IS_SOFT_LLP, &dwc->flags) && residue) |
| 1071 | residue -= dwc_get_sent(dwc); |
| 1072 | |
| 1073 | spin_unlock_irqrestore(&dwc->lock, flags); |
| 1074 | return residue; |
| 1075 | } |
| 1076 | |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1077 | static enum dma_status |
Linus Walleij | 0793448 | 2010-03-26 16:50:49 -0700 | [diff] [blame] | 1078 | dwc_tx_status(struct dma_chan *chan, |
| 1079 | dma_cookie_t cookie, |
| 1080 | struct dma_tx_state *txstate) |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1081 | { |
| 1082 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
Russell King - ARM Linux | 96a2af4 | 2012-03-06 22:35:27 +0000 | [diff] [blame] | 1083 | enum dma_status ret; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1084 | |
Russell King - ARM Linux | 96a2af4 | 2012-03-06 22:35:27 +0000 | [diff] [blame] | 1085 | ret = dma_cookie_status(chan, cookie, txstate); |
Vinod Koul | 2c40410 | 2013-10-16 13:41:15 +0530 | [diff] [blame] | 1086 | if (ret == DMA_COMPLETE) |
Andy Shevchenko | 12381dc | 2013-07-15 15:04:40 +0300 | [diff] [blame] | 1087 | return ret; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1088 | |
Andy Shevchenko | 12381dc | 2013-07-15 15:04:40 +0300 | [diff] [blame] | 1089 | dwc_scan_descriptors(to_dw_dma(chan->device), dwc); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1090 | |
Andy Shevchenko | 12381dc | 2013-07-15 15:04:40 +0300 | [diff] [blame] | 1091 | ret = dma_cookie_status(chan, cookie, txstate); |
Vinod Koul | 2c40410 | 2013-10-16 13:41:15 +0530 | [diff] [blame] | 1092 | if (ret != DMA_COMPLETE) |
Andy Shevchenko | 4702d52 | 2013-01-25 11:48:03 +0200 | [diff] [blame] | 1093 | dma_set_residue(txstate, dwc_get_residue(dwc)); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1094 | |
Andy Shevchenko | 5e09f98 | 2016-03-18 16:24:51 +0200 | [diff] [blame] | 1095 | if (test_bit(DW_DMA_IS_PAUSED, &dwc->flags) && ret == DMA_IN_PROGRESS) |
Linus Walleij | a7c57cf | 2011-04-19 08:31:32 +0800 | [diff] [blame] | 1096 | return DMA_PAUSED; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1097 | |
| 1098 | return ret; |
| 1099 | } |
| 1100 | |
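/*
 * A sketch of polling a transfer through the status callback above; the
 * cookie comes from an earlier dmaengine_submit().  The residue filled in
 * via dwc_get_residue() reports bytes not yet transferred.
 */
static bool example_poll_done(struct dma_chan *chan, dma_cookie_t cookie)
{
	struct dma_tx_state state;
	enum dma_status status;

	status = dmaengine_tx_status(chan, cookie, &state);
	if (status == DMA_IN_PROGRESS || status == DMA_PAUSED)
		pr_debug("bytes left: %u\n", state.residue);

	return status == DMA_COMPLETE;
}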
| 1101 | static void dwc_issue_pending(struct dma_chan *chan) |
| 1102 | { |
| 1103 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
Andy Shevchenko | dd8ecfca | 2014-06-18 12:15:38 +0300 | [diff] [blame] | 1104 | unsigned long flags; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1105 | |
Andy Shevchenko | dd8ecfca | 2014-06-18 12:15:38 +0300 | [diff] [blame] | 1106 | spin_lock_irqsave(&dwc->lock, flags); |
| 1107 | if (list_empty(&dwc->active_list)) |
| 1108 | dwc_dostart_first_queued(dwc); |
| 1109 | spin_unlock_irqrestore(&dwc->lock, flags); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1110 | } |
| 1111 | |
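/*
 * Nothing is started at submit time; the client must kick the channel.
 * A sketch of the usual tail of the submit path:
 *
 *	cookie = dmaengine_submit(txd);
 *	dma_async_issue_pending(chan);	 (-> dwc_issue_pending())
 */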
Andy Shevchenko | 99d9bf4 | 2014-09-23 17:18:14 +0300 | [diff] [blame] | 1112 | /*----------------------------------------------------------------------*/ |
| 1113 | |
| 1114 | static void dw_dma_off(struct dw_dma *dw) |
| 1115 | { |
Andy Shevchenko | 7794e5b | 2016-03-18 16:24:48 +0200 | [diff] [blame] | 1116 | unsigned int i; |
Andy Shevchenko | 99d9bf4 | 2014-09-23 17:18:14 +0300 | [diff] [blame] | 1117 | |
| 1118 | dma_writel(dw, CFG, 0); |
| 1119 | |
| 1120 | channel_clear_bit(dw, MASK.XFER, dw->all_chan_mask); |
Mans Rullgard | 2895b2c | 2016-01-11 13:04:29 +0000 | [diff] [blame] | 1121 | channel_clear_bit(dw, MASK.BLOCK, dw->all_chan_mask); |
Andy Shevchenko | 99d9bf4 | 2014-09-23 17:18:14 +0300 | [diff] [blame] | 1122 | channel_clear_bit(dw, MASK.SRC_TRAN, dw->all_chan_mask); |
| 1123 | channel_clear_bit(dw, MASK.DST_TRAN, dw->all_chan_mask); |
| 1124 | channel_clear_bit(dw, MASK.ERROR, dw->all_chan_mask); |
| 1125 | |
| 1126 | while (dma_readl(dw, CFG) & DW_CFG_DMA_EN) |
| 1127 | cpu_relax(); |
| 1128 | |
| 1129 | for (i = 0; i < dw->dma.chancnt; i++) |
Andy Shevchenko | 423f9cb | 2016-03-18 16:24:52 +0200 | [diff] [blame^] | 1130 | clear_bit(DW_DMA_IS_INITIALIZED, &dw->chan[i].flags); |
Andy Shevchenko | 99d9bf4 | 2014-09-23 17:18:14 +0300 | [diff] [blame] | 1131 | } |
| 1132 | |
| 1133 | static void dw_dma_on(struct dw_dma *dw) |
| 1134 | { |
| 1135 | dma_writel(dw, CFG, DW_CFG_DMA_EN); |
| 1136 | } |
| 1137 | |
Dan Williams | aa1e6f1 | 2009-01-06 11:38:17 -0700 | [diff] [blame] | 1138 | static int dwc_alloc_chan_resources(struct dma_chan *chan) |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1139 | { |
| 1140 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 1141 | struct dw_dma *dw = to_dw_dma(chan->device); |
| 1142 | struct dw_desc *desc; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1143 | int i; |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1144 | unsigned long flags; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1145 | |
Andy Shevchenko | 2e4c364 | 2012-06-19 13:34:05 +0300 | [diff] [blame] | 1146 | dev_vdbg(chan2dev(chan), "%s\n", __func__); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1147 | |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1148 | /* ASSERT: channel is idle */ |
| 1149 | if (dma_readl(dw, CH_EN) & dwc->mask) { |
Dan Williams | 41d5e59 | 2009-01-06 11:38:21 -0700 | [diff] [blame] | 1150 | dev_dbg(chan2dev(chan), "DMA channel not idle?\n"); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1151 | return -EIO; |
| 1152 | } |
| 1153 | |
Russell King - ARM Linux | d3ee98cdc | 2012-03-06 22:35:47 +0000 | [diff] [blame] | 1154 | dma_cookie_init(chan); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1155 | |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1156 | /* |
| 1157 | * NOTE: some controllers may have additional features that we |
| 1158 | * need to initialize here, like "scatter-gather" (which |
| 1159 | * doesn't mean what you think it means), and status writeback. |
| 1160 | */ |
| 1161 | |
Andy Shevchenko | 3fe6409 | 2016-04-08 16:22:17 +0300 | [diff] [blame] | 1162 | /* |
| 1163 | * We need controller-specific data to set up slave transfers. |
| 1164 | */ |
| 1165 | if (chan->private && !dw_dma_filter(chan, chan->private)) { |
| 1166 | dev_warn(chan2dev(chan), "Wrong controller-specific data\n"); |
| 1167 | return -EINVAL; |
| 1168 | } |
| 1169 | |
Andy Shevchenko | 99d9bf4 | 2014-09-23 17:18:14 +0300 | [diff] [blame] | 1170 | /* Enable controller here if needed */ |
| 1171 | if (!dw->in_use) |
| 1172 | dw_dma_on(dw); |
| 1173 | dw->in_use |= dwc->mask; |
| 1174 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1175 | spin_lock_irqsave(&dwc->lock, flags); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1176 | i = dwc->descs_allocated; |
| 1177 | while (dwc->descs_allocated < NR_DESCS_PER_CHANNEL) { |
Andy Shevchenko | f8122a8 | 2013-01-16 15:48:50 +0200 | [diff] [blame] | 1178 | dma_addr_t phys; |
| 1179 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1180 | spin_unlock_irqrestore(&dwc->lock, flags); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1181 | |
Andy Shevchenko | f8122a8 | 2013-01-16 15:48:50 +0200 | [diff] [blame] | 1182 | desc = dma_pool_alloc(dw->desc_pool, GFP_ATOMIC, &phys); |
Andy Shevchenko | cbd6531 | 2013-01-09 10:17:11 +0200 | [diff] [blame] | 1183 | if (!desc) |
| 1184 | goto err_desc_alloc; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1185 | |
Andy Shevchenko | f8122a8 | 2013-01-16 15:48:50 +0200 | [diff] [blame] | 1186 | memset(desc, 0, sizeof(struct dw_desc)); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1187 | |
Dan Williams | e0bd0f8 | 2009-09-08 17:53:02 -0700 | [diff] [blame] | 1188 | INIT_LIST_HEAD(&desc->tx_list); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1189 | dma_async_tx_descriptor_init(&desc->txd, chan); |
| 1190 | desc->txd.tx_submit = dwc_tx_submit; |
| 1191 | desc->txd.flags = DMA_CTRL_ACK; |
Andy Shevchenko | f8122a8 | 2013-01-16 15:48:50 +0200 | [diff] [blame] | 1192 | desc->txd.phys = phys; |
Andy Shevchenko | cbd6531 | 2013-01-09 10:17:11 +0200 | [diff] [blame] | 1193 | |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1194 | dwc_desc_put(dwc, desc); |
| 1195 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1196 | spin_lock_irqsave(&dwc->lock, flags); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1197 | i = ++dwc->descs_allocated; |
| 1198 | } |
| 1199 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1200 | spin_unlock_irqrestore(&dwc->lock, flags); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1201 | |
Andy Shevchenko | 2e4c364 | 2012-06-19 13:34:05 +0300 | [diff] [blame] | 1202 | dev_dbg(chan2dev(chan), "%s: allocated %d descriptors\n", __func__, i); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1203 | |
| 1204 | return i; |
Andy Shevchenko | cbd6531 | 2013-01-09 10:17:11 +0200 | [diff] [blame] | 1205 | |
| 1206 | err_desc_alloc: |
Andy Shevchenko | cbd6531 | 2013-01-09 10:17:11 +0200 | [diff] [blame] | 1207 | dev_info(chan2dev(chan), "only allocated %d descriptors\n", i); |
| 1208 | |
| 1209 | return i; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1210 | } |
| 1211 | |
| 1212 | static void dwc_free_chan_resources(struct dma_chan *chan) |
| 1213 | { |
| 1214 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 1215 | struct dw_dma *dw = to_dw_dma(chan->device); |
| 1216 | struct dw_desc *desc, *_desc; |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1217 | unsigned long flags; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1218 | LIST_HEAD(list); |
| 1219 | |
Andy Shevchenko | 2e4c364 | 2012-06-19 13:34:05 +0300 | [diff] [blame] | 1220 | dev_dbg(chan2dev(chan), "%s: descs allocated=%u\n", __func__, |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1221 | dwc->descs_allocated); |
| 1222 | |
| 1223 | /* ASSERT: channel is idle */ |
| 1224 | BUG_ON(!list_empty(&dwc->active_list)); |
| 1225 | BUG_ON(!list_empty(&dwc->queue)); |
| 1226 | BUG_ON(dma_readl(to_dw_dma(chan->device), CH_EN) & dwc->mask); |
| 1227 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1228 | spin_lock_irqsave(&dwc->lock, flags); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1229 | list_splice_init(&dwc->free_list, &list); |
| 1230 | dwc->descs_allocated = 0; |
Andy Shevchenko | 3fe6409 | 2016-04-08 16:22:17 +0300 | [diff] [blame] | 1231 | |
| 1232 | /* Clear custom channel configuration */ |
| 1233 | dwc->src_id = 0; |
| 1234 | dwc->dst_id = 0; |
| 1235 | |
Andy Shevchenko | c422025 | 2016-03-18 16:24:41 +0200 | [diff] [blame] | 1236 | dwc->m_master = 0; |
| 1237 | dwc->p_master = 0; |
Andy Shevchenko | 3fe6409 | 2016-04-08 16:22:17 +0300 | [diff] [blame] | 1238 | |
Andy Shevchenko | 423f9cb | 2016-03-18 16:24:52 +0200 | [diff] [blame^] | 1239 | clear_bit(DW_DMA_IS_INITIALIZED, &dwc->flags); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1240 | |
| 1241 | /* Disable interrupts */ |
| 1242 | channel_clear_bit(dw, MASK.XFER, dwc->mask); |
Mans Rullgard | 2895b2c | 2016-01-11 13:04:29 +0000 | [diff] [blame] | 1243 | channel_clear_bit(dw, MASK.BLOCK, dwc->mask); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1244 | channel_clear_bit(dw, MASK.ERROR, dwc->mask); |
| 1245 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1246 | spin_unlock_irqrestore(&dwc->lock, flags); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1247 | |
Andy Shevchenko | 99d9bf4 | 2014-09-23 17:18:14 +0300 | [diff] [blame] | 1248 | 	/* Disable controller in case it was the last user */ |
| 1249 | dw->in_use &= ~dwc->mask; |
| 1250 | if (!dw->in_use) |
| 1251 | dw_dma_off(dw); |
| 1252 | |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1253 | list_for_each_entry_safe(desc, _desc, &list, desc_node) { |
Dan Williams | 41d5e59 | 2009-01-06 11:38:21 -0700 | [diff] [blame] | 1254 | dev_vdbg(chan2dev(chan), " freeing descriptor %p\n", desc); |
Andy Shevchenko | f8122a8 | 2013-01-16 15:48:50 +0200 | [diff] [blame] | 1255 | dma_pool_free(dw->desc_pool, desc, desc->txd.phys); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1256 | } |
| 1257 | |
Andy Shevchenko | 2e4c364 | 2012-06-19 13:34:05 +0300 | [diff] [blame] | 1258 | dev_vdbg(chan2dev(chan), "%s: done\n", __func__); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1259 | } |
| 1260 | |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1261 | /* --------------------- Cyclic DMA API extensions -------------------- */ |
| 1262 | |
| 1263 | /** |
| 1264 | * dw_dma_cyclic_start - start the cyclic DMA transfer |
| 1265 | * @chan: the DMA channel to start |
| 1266 | * |
| 1267 | * Must be called with soft interrupts disabled. Returns zero on success or |
| 1268 | * -errno on failure. |
| 1269 | */ |
| 1270 | int dw_dma_cyclic_start(struct dma_chan *chan) |
| 1271 | { |
| 1272 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
Andy Shevchenko | ee1cdcd | 2016-02-10 15:59:42 +0200 | [diff] [blame] | 1273 | struct dw_dma *dw = to_dw_dma(chan->device); |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1274 | unsigned long flags; |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1275 | |
| 1276 | if (!test_bit(DW_DMA_IS_CYCLIC, &dwc->flags)) { |
| 1277 | dev_err(chan2dev(&dwc->chan), "missing prep for cyclic DMA\n"); |
| 1278 | return -ENODEV; |
| 1279 | } |
| 1280 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1281 | spin_lock_irqsave(&dwc->lock, flags); |
Andy Shevchenko | ee1cdcd | 2016-02-10 15:59:42 +0200 | [diff] [blame] | 1282 | |
| 1283 | /* Enable interrupts to perform cyclic transfer */ |
| 1284 | channel_set_bit(dw, MASK.BLOCK, dwc->mask); |
| 1285 | |
Mans Rullgard | df3bb8a | 2016-01-11 13:04:28 +0000 | [diff] [blame] | 1286 | dwc_dostart(dwc, dwc->cdesc->desc[0]); |
Andy Shevchenko | ee1cdcd | 2016-02-10 15:59:42 +0200 | [diff] [blame] | 1287 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1288 | spin_unlock_irqrestore(&dwc->lock, flags); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1289 | |
| 1290 | return 0; |
| 1291 | } |
| 1292 | EXPORT_SYMBOL(dw_dma_cyclic_start); |
| 1293 | |
| 1294 | /** |
| 1295 | * dw_dma_cyclic_stop - stop the cyclic DMA transfer |
| 1296 | * @chan: the DMA channel to stop |
| 1297 | * |
| 1298 | * Must be called with soft interrupts disabled. |
| 1299 | */ |
| 1300 | void dw_dma_cyclic_stop(struct dma_chan *chan) |
| 1301 | { |
| 1302 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 1303 | struct dw_dma *dw = to_dw_dma(dwc->chan.device); |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1304 | unsigned long flags; |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1305 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1306 | spin_lock_irqsave(&dwc->lock, flags); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1307 | |
Andy Shevchenko | 3f936207 | 2012-06-19 13:46:32 +0300 | [diff] [blame] | 1308 | dwc_chan_disable(dw, dwc); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1309 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1310 | spin_unlock_irqrestore(&dwc->lock, flags); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1311 | } |
| 1312 | EXPORT_SYMBOL(dw_dma_cyclic_stop); |
| 1313 | |
| 1314 | /** |
| 1315 | * dw_dma_cyclic_prep - prepare the cyclic DMA transfer |
| 1316 | * @chan: the DMA channel to prepare |
| 1317 | * @buf_addr: physical DMA address where the buffer starts |
| 1318 | * @buf_len: total number of bytes for the entire buffer |
| 1319 | * @period_len: number of bytes for each period |
| 1320 | * @direction: transfer direction, to or from device |
| 1321 | * |
| 1322 | * Must be called before trying to start the transfer. Returns a valid struct |
| 1323 | * dw_cyclic_desc if successful or an ERR_PTR(-errno) if not successful. |
| 1324 | */ |
| 1325 | struct dw_cyclic_desc *dw_dma_cyclic_prep(struct dma_chan *chan, |
| 1326 | dma_addr_t buf_addr, size_t buf_len, size_t period_len, |
Vinod Koul | db8196d | 2011-10-13 22:34:23 +0530 | [diff] [blame] | 1327 | enum dma_transfer_direction direction) |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1328 | { |
| 1329 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 1330 | struct dma_slave_config *sconfig = &dwc->dma_sconfig; |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1331 | struct dw_cyclic_desc *cdesc; |
| 1332 | struct dw_cyclic_desc *retval = NULL; |
| 1333 | struct dw_desc *desc; |
| 1334 | struct dw_desc *last = NULL; |
Mans Rullgard | 2a0fae0 | 2016-03-18 16:24:44 +0200 | [diff] [blame] | 1335 | u8 lms = DWC_LLP_LMS(dwc->m_master); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1336 | unsigned long was_cyclic; |
| 1337 | unsigned int reg_width; |
| 1338 | unsigned int periods; |
| 1339 | unsigned int i; |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1340 | unsigned long flags; |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1341 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1342 | spin_lock_irqsave(&dwc->lock, flags); |
Andy Shevchenko | fed2574 | 2012-09-21 15:05:49 +0300 | [diff] [blame] | 1343 | if (dwc->nollp) { |
| 1344 | spin_unlock_irqrestore(&dwc->lock, flags); |
| 1345 | dev_dbg(chan2dev(&dwc->chan), |
| 1346 | "channel doesn't support LLP transfers\n"); |
| 1347 | return ERR_PTR(-EINVAL); |
| 1348 | } |
| 1349 | |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1350 | if (!list_empty(&dwc->queue) || !list_empty(&dwc->active_list)) { |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1351 | spin_unlock_irqrestore(&dwc->lock, flags); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1352 | dev_dbg(chan2dev(&dwc->chan), |
| 1353 | "queue and/or active list are not empty\n"); |
| 1354 | return ERR_PTR(-EBUSY); |
| 1355 | } |
| 1356 | |
| 1357 | was_cyclic = test_and_set_bit(DW_DMA_IS_CYCLIC, &dwc->flags); |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1358 | spin_unlock_irqrestore(&dwc->lock, flags); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1359 | if (was_cyclic) { |
| 1360 | dev_dbg(chan2dev(&dwc->chan), |
| 1361 | "channel already prepared for cyclic DMA\n"); |
| 1362 | return ERR_PTR(-EBUSY); |
| 1363 | } |
| 1364 | |
| 1365 | retval = ERR_PTR(-EINVAL); |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 1366 | |
Andy Shevchenko | f44b92f | 2013-01-10 10:52:58 +0200 | [diff] [blame] | 1367 | if (unlikely(!is_slave_direction(direction))) |
| 1368 | goto out_err; |
| 1369 | |
Andy Shevchenko | 0fdb567 | 2013-01-10 10:53:03 +0200 | [diff] [blame] | 1370 | dwc->direction = direction; |
| 1371 | |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 1372 | if (direction == DMA_MEM_TO_DEV) |
| 1373 | reg_width = __ffs(sconfig->dst_addr_width); |
| 1374 | else |
| 1375 | reg_width = __ffs(sconfig->src_addr_width); |
| 1376 | |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1377 | periods = buf_len / period_len; |
| 1378 | |
| 1379 | /* Check for too big/unaligned periods and unaligned DMA buffer. */ |
Andy Shevchenko | 4a63a8b | 2012-09-21 15:05:47 +0300 | [diff] [blame] | 1380 | if (period_len > (dwc->block_size << reg_width)) |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1381 | goto out_err; |
| 1382 | if (unlikely(period_len & ((1 << reg_width) - 1))) |
| 1383 | goto out_err; |
| 1384 | if (unlikely(buf_addr & ((1 << reg_width) - 1))) |
| 1385 | goto out_err; |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1386 | |
| 1387 | retval = ERR_PTR(-ENOMEM); |
| 1388 | |
| 1389 | if (periods > NR_DESCS_PER_CHANNEL) |
| 1390 | goto out_err; |
| 1391 | |
| 1392 | cdesc = kzalloc(sizeof(struct dw_cyclic_desc), GFP_KERNEL); |
| 1393 | if (!cdesc) |
| 1394 | goto out_err; |
| 1395 | |
 | 1396 | 	cdesc->desc = kcalloc(periods, sizeof(struct dw_desc *), GFP_KERNEL); |
| 1397 | if (!cdesc->desc) |
| 1398 | goto out_err_alloc; |
| 1399 | |
| 1400 | for (i = 0; i < periods; i++) { |
| 1401 | desc = dwc_desc_get(dwc); |
| 1402 | if (!desc) |
| 1403 | goto out_err_desc_get; |
| 1404 | |
| 1405 | switch (direction) { |
Vinod Koul | db8196d | 2011-10-13 22:34:23 +0530 | [diff] [blame] | 1406 | case DMA_MEM_TO_DEV: |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 1407 | lli_write(desc, dar, sconfig->dst_addr); |
| 1408 | lli_write(desc, sar, buf_addr + period_len * i); |
| 1409 | lli_write(desc, ctllo, (DWC_DEFAULT_CTLLO(chan) |
| 1410 | | DWC_CTLL_DST_WIDTH(reg_width) |
| 1411 | | DWC_CTLL_SRC_WIDTH(reg_width) |
| 1412 | | DWC_CTLL_DST_FIX |
| 1413 | | DWC_CTLL_SRC_INC |
| 1414 | | DWC_CTLL_INT_EN)); |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 1415 | |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 1416 | lli_set(desc, ctllo, sconfig->device_fc ? |
| 1417 | DWC_CTLL_FC(DW_DMA_FC_P_M2P) : |
| 1418 | DWC_CTLL_FC(DW_DMA_FC_D_M2P)); |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 1419 | |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1420 | break; |
Vinod Koul | db8196d | 2011-10-13 22:34:23 +0530 | [diff] [blame] | 1421 | case DMA_DEV_TO_MEM: |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 1422 | lli_write(desc, dar, buf_addr + period_len * i); |
| 1423 | lli_write(desc, sar, sconfig->src_addr); |
| 1424 | lli_write(desc, ctllo, (DWC_DEFAULT_CTLLO(chan) |
| 1425 | | DWC_CTLL_SRC_WIDTH(reg_width) |
| 1426 | | DWC_CTLL_DST_WIDTH(reg_width) |
| 1427 | | DWC_CTLL_DST_INC |
| 1428 | | DWC_CTLL_SRC_FIX |
| 1429 | | DWC_CTLL_INT_EN)); |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 1430 | |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 1431 | lli_set(desc, ctllo, sconfig->device_fc ? |
| 1432 | DWC_CTLL_FC(DW_DMA_FC_P_P2M) : |
| 1433 | DWC_CTLL_FC(DW_DMA_FC_D_P2M)); |
Viresh Kumar | 327e697 | 2012-02-01 16:12:26 +0530 | [diff] [blame] | 1434 | |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1435 | break; |
| 1436 | default: |
| 1437 | break; |
| 1438 | } |
| 1439 | |
Mans Rullgard | df1f3a2 | 2016-03-18 16:24:43 +0200 | [diff] [blame] | 1440 | lli_write(desc, ctlhi, period_len >> reg_width); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1441 | cdesc->desc[i] = desc; |
| 1442 | |
Andy Shevchenko | f8122a8 | 2013-01-16 15:48:50 +0200 | [diff] [blame] | 1443 | if (last) |
Mans Rullgard | 2a0fae0 | 2016-03-18 16:24:44 +0200 | [diff] [blame] | 1444 | lli_write(last, llp, desc->txd.phys | lms); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1445 | |
| 1446 | last = desc; |
| 1447 | } |
| 1448 | |
Andy Shevchenko | 75c6122 | 2013-03-26 16:53:54 +0200 | [diff] [blame] | 1449 | /* Let's make a cyclic list */ |
Mans Rullgard | 2a0fae0 | 2016-03-18 16:24:44 +0200 | [diff] [blame] | 1450 | lli_write(last, llp, cdesc->desc[0]->txd.phys | lms); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1451 | |
Andy Shevchenko | 5a87f0e | 2014-01-13 14:04:50 +0200 | [diff] [blame] | 1452 | dev_dbg(chan2dev(&dwc->chan), |
| 1453 | "cyclic prepared buf %pad len %zu period %zu periods %d\n", |
| 1454 | &buf_addr, buf_len, period_len, periods); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1455 | |
| 1456 | cdesc->periods = periods; |
| 1457 | dwc->cdesc = cdesc; |
| 1458 | |
| 1459 | return cdesc; |
| 1460 | |
| 1461 | out_err_desc_get: |
| 1462 | while (i--) |
| 1463 | dwc_desc_put(dwc, cdesc->desc[i]); |
| 1464 | out_err_alloc: |
| 1465 | kfree(cdesc); |
| 1466 | out_err: |
| 1467 | clear_bit(DW_DMA_IS_CYCLIC, &dwc->flags); |
| 1468 | return (struct dw_cyclic_desc *)retval; |
| 1469 | } |
| 1470 | EXPORT_SYMBOL(dw_dma_cyclic_prep); |
| 1471 | |
| 1472 | /** |
| 1473 | * dw_dma_cyclic_free - free a prepared cyclic DMA transfer |
| 1474 | * @chan: the DMA channel to free |
| 1475 | */ |
| 1476 | void dw_dma_cyclic_free(struct dma_chan *chan) |
| 1477 | { |
| 1478 | struct dw_dma_chan *dwc = to_dw_dma_chan(chan); |
| 1479 | struct dw_dma *dw = to_dw_dma(dwc->chan.device); |
| 1480 | struct dw_cyclic_desc *cdesc = dwc->cdesc; |
Andy Shevchenko | 7794e5b | 2016-03-18 16:24:48 +0200 | [diff] [blame] | 1481 | unsigned int i; |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1482 | unsigned long flags; |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1483 | |
Andy Shevchenko | 2e4c364 | 2012-06-19 13:34:05 +0300 | [diff] [blame] | 1484 | dev_dbg(chan2dev(&dwc->chan), "%s\n", __func__); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1485 | |
| 1486 | if (!cdesc) |
| 1487 | return; |
| 1488 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1489 | spin_lock_irqsave(&dwc->lock, flags); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1490 | |
Andy Shevchenko | 3f936207 | 2012-06-19 13:46:32 +0300 | [diff] [blame] | 1491 | dwc_chan_disable(dw, dwc); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1492 | |
Mans Rullgard | 2895b2c | 2016-01-11 13:04:29 +0000 | [diff] [blame] | 1493 | dma_writel(dw, CLEAR.BLOCK, dwc->mask); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1494 | dma_writel(dw, CLEAR.ERROR, dwc->mask); |
| 1495 | dma_writel(dw, CLEAR.XFER, dwc->mask); |
| 1496 | |
Viresh Kumar | 69cea5a | 2011-04-15 16:03:35 +0530 | [diff] [blame] | 1497 | spin_unlock_irqrestore(&dwc->lock, flags); |
Hans-Christian Egtvedt | d9de451 | 2009-04-01 15:47:02 +0200 | [diff] [blame] | 1498 | |
| 1499 | for (i = 0; i < cdesc->periods; i++) |
| 1500 | dwc_desc_put(dwc, cdesc->desc[i]); |
| 1501 | |
| 1502 | kfree(cdesc->desc); |
| 1503 | kfree(cdesc); |
| 1504 | |
| 1505 | clear_bit(DW_DMA_IS_CYCLIC, &dwc->flags); |
| 1506 | } |
| 1507 | EXPORT_SYMBOL(dw_dma_cyclic_free); |
| 1508 | |
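/*
 * A sketch of the full cyclic lifecycle as a client would drive it, e.g.
 * for an audio ring buffer.  Per the kerneldoc above, start/stop must be
 * called with soft interrupts disabled; the channel is assumed to be
 * configured already via dmaengine_slave_config().
 */
static int example_cyclic(struct dma_chan *chan, dma_addr_t buf,
			  size_t buf_len, size_t period_len)
{
	struct dw_cyclic_desc *cdesc;
	int ret;

	cdesc = dw_dma_cyclic_prep(chan, buf, buf_len, period_len,
				   DMA_MEM_TO_DEV);
	if (IS_ERR(cdesc))
		return PTR_ERR(cdesc);

	ret = dw_dma_cyclic_start(chan);
	if (ret)
		goto out_free;

	/* ... stream data; a block interrupt fires after each period ... */

	dw_dma_cyclic_stop(chan);
out_free:
	dw_dma_cyclic_free(chan);
	return ret;
}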
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1509 | /*----------------------------------------------------------------------*/ |
| 1510 | |
Andy Shevchenko | 9cade1a | 2013-06-05 15:26:45 +0300 | [diff] [blame] | 1511 | int dw_dma_probe(struct dw_dma_chip *chip, struct dw_dma_platform_data *pdata) |
Viresh Kumar | a9ddb57 | 2012-10-16 09:49:17 +0530 | [diff] [blame] | 1512 | { |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1513 | struct dw_dma *dw; |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1514 | bool autocfg = false; |
Andy Shevchenko | 482c67e | 2012-09-21 15:05:46 +0300 | [diff] [blame] | 1515 | unsigned int dw_params; |
Andy Shevchenko | 4a63a8b | 2012-09-21 15:05:47 +0300 | [diff] [blame] | 1516 | unsigned int max_blk_size = 0; |
Andy Shevchenko | 7794e5b | 2016-03-18 16:24:48 +0200 | [diff] [blame] | 1517 | unsigned int i; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1518 | int err; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1519 | |
Andy Shevchenko | 000871c | 2014-03-05 15:48:12 +0200 | [diff] [blame] | 1520 | dw = devm_kzalloc(chip->dev, sizeof(*dw), GFP_KERNEL); |
| 1521 | if (!dw) |
| 1522 | return -ENOMEM; |
| 1523 | |
| 1524 | dw->regs = chip->regs; |
| 1525 | chip->dw = dw; |
| 1526 | |
Andy Shevchenko | bb32baf | 2014-11-05 18:34:48 +0200 | [diff] [blame] | 1527 | pm_runtime_get_sync(chip->dev); |
| 1528 | |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1529 | if (!pdata) { |
Andy Shevchenko | 897e40d | 2016-03-18 16:24:46 +0200 | [diff] [blame] | 1530 | dw_params = dma_readl(dw, DW_PARAMS); |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1531 | dev_dbg(chip->dev, "DW_PARAMS: 0x%08x\n", dw_params); |
Andy Shevchenko | 482c67e | 2012-09-21 15:05:46 +0300 | [diff] [blame] | 1532 | |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1533 | autocfg = dw_params >> DW_PARAMS_EN & 1; |
| 1534 | if (!autocfg) { |
| 1535 | err = -EINVAL; |
| 1536 | goto err_pdata; |
| 1537 | } |
Andy Shevchenko | 123de54 | 2013-01-09 10:17:01 +0200 | [diff] [blame] | 1538 | |
Andy Shevchenko | 9cade1a | 2013-06-05 15:26:45 +0300 | [diff] [blame] | 1539 | pdata = devm_kzalloc(chip->dev, sizeof(*pdata), GFP_KERNEL); |
Andy Shevchenko | 8be4f52 | 2014-05-08 12:01:49 +0300 | [diff] [blame] | 1540 | if (!pdata) { |
| 1541 | err = -ENOMEM; |
| 1542 | goto err_pdata; |
| 1543 | } |
Andy Shevchenko | 123de54 | 2013-01-09 10:17:01 +0200 | [diff] [blame] | 1544 | |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1545 | /* Get hardware configuration parameters */ |
| 1546 | pdata->nr_channels = (dw_params >> DW_PARAMS_NR_CHAN & 7) + 1; |
| 1547 | pdata->nr_masters = (dw_params >> DW_PARAMS_NR_MASTER & 3) + 1; |
| 1548 | for (i = 0; i < pdata->nr_masters; i++) { |
| 1549 | pdata->data_width[i] = |
| 1550 | (dw_params >> DW_PARAMS_DATA_WIDTH(i) & 3) + 2; |
| 1551 | } |
| 1552 | max_blk_size = dma_readl(dw, MAX_BLK_SIZE); |
| 1553 | |
Andy Shevchenko | 123de54 | 2013-01-09 10:17:01 +0200 | [diff] [blame] | 1554 | /* Fill platform data with the default values */ |
| 1555 | pdata->is_private = true; |
Andy Shevchenko | df5c738 | 2015-10-13 20:09:19 +0300 | [diff] [blame] | 1556 | pdata->is_memcpy = true; |
Andy Shevchenko | 123de54 | 2013-01-09 10:17:01 +0200 | [diff] [blame] | 1557 | pdata->chan_allocation_order = CHAN_ALLOCATION_ASCENDING; |
| 1558 | pdata->chan_priority = CHAN_PRIORITY_ASCENDING; |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1559 | } else if (pdata->nr_channels > DW_DMA_MAX_NR_CHANNELS) { |
Andy Shevchenko | 8be4f52 | 2014-05-08 12:01:49 +0300 | [diff] [blame] | 1560 | err = -EINVAL; |
| 1561 | goto err_pdata; |
| 1562 | } |
Andy Shevchenko | 123de54 | 2013-01-09 10:17:01 +0200 | [diff] [blame] | 1563 | |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1564 | dw->chan = devm_kcalloc(chip->dev, pdata->nr_channels, sizeof(*dw->chan), |
Andy Shevchenko | 000871c | 2014-03-05 15:48:12 +0200 | [diff] [blame] | 1565 | GFP_KERNEL); |
Andy Shevchenko | 8be4f52 | 2014-05-08 12:01:49 +0300 | [diff] [blame] | 1566 | if (!dw->chan) { |
| 1567 | err = -ENOMEM; |
| 1568 | goto err_pdata; |
| 1569 | } |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1570 | |
Andy Shevchenko | 75c6122 | 2013-03-26 16:53:54 +0200 | [diff] [blame] | 1571 | /* Get hardware configuration parameters */ |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1572 | dw->nr_masters = pdata->nr_masters; |
| 1573 | for (i = 0; i < dw->nr_masters; i++) |
| 1574 | dw->data_width[i] = pdata->data_width[i]; |
Andy Shevchenko | a098200 | 2012-09-21 15:05:48 +0300 | [diff] [blame] | 1575 | |
Andy Shevchenko | 11f932e | 2012-06-19 13:34:06 +0300 | [diff] [blame] | 1576 | /* Calculate all channel mask before DMA setup */ |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1577 | dw->all_chan_mask = (1 << pdata->nr_channels) - 1; |
Andy Shevchenko | 11f932e | 2012-06-19 13:34:06 +0300 | [diff] [blame] | 1578 | |
Andy Shevchenko | 75c6122 | 2013-03-26 16:53:54 +0200 | [diff] [blame] | 1579 | /* Force dma off, just in case */ |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1580 | dw_dma_off(dw); |
| 1581 | |
Andy Shevchenko | 75c6122 | 2013-03-26 16:53:54 +0200 | [diff] [blame] | 1582 | /* Create a pool of consistent memory blocks for hardware descriptors */ |
Andy Shevchenko | 9cade1a | 2013-06-05 15:26:45 +0300 | [diff] [blame] | 1583 | dw->desc_pool = dmam_pool_create("dw_dmac_desc_pool", chip->dev, |
Andy Shevchenko | f8122a8 | 2013-01-16 15:48:50 +0200 | [diff] [blame] | 1584 | sizeof(struct dw_desc), 4, 0); |
| 1585 | if (!dw->desc_pool) { |
Andy Shevchenko | 9cade1a | 2013-06-05 15:26:45 +0300 | [diff] [blame] | 1586 | 		dev_err(chip->dev, "No memory for descriptor DMA pool\n"); |
Andy Shevchenko | 8be4f52 | 2014-05-08 12:01:49 +0300 | [diff] [blame] | 1587 | err = -ENOMEM; |
| 1588 | goto err_pdata; |
Andy Shevchenko | f8122a8 | 2013-01-16 15:48:50 +0200 | [diff] [blame] | 1589 | } |
| 1590 | |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1591 | tasklet_init(&dw->tasklet, dw_dma_tasklet, (unsigned long)dw); |
| 1592 | |
Andy Shevchenko | 97977f7 | 2014-05-07 10:56:24 +0300 | [diff] [blame] | 1593 | err = request_irq(chip->irq, dw_dma_interrupt, IRQF_SHARED, |
| 1594 | "dw_dmac", dw); |
| 1595 | if (err) |
Andy Shevchenko | 8be4f52 | 2014-05-08 12:01:49 +0300 | [diff] [blame] | 1596 | goto err_pdata; |
Andy Shevchenko | 97977f7 | 2014-05-07 10:56:24 +0300 | [diff] [blame] | 1597 | |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1598 | INIT_LIST_HEAD(&dw->dma.channels); |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1599 | for (i = 0; i < pdata->nr_channels; i++) { |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1600 | struct dw_dma_chan *dwc = &dw->chan[i]; |
| 1601 | |
| 1602 | dwc->chan.device = &dw->dma; |
Russell King - ARM Linux | d3ee98cdc | 2012-03-06 22:35:47 +0000 | [diff] [blame] | 1603 | dma_cookie_init(&dwc->chan); |
Viresh Kumar | b0c3130 | 2011-03-03 15:47:21 +0530 | [diff] [blame] | 1604 | if (pdata->chan_allocation_order == CHAN_ALLOCATION_ASCENDING) |
| 1605 | list_add_tail(&dwc->chan.device_node, |
| 1606 | &dw->dma.channels); |
| 1607 | else |
| 1608 | list_add(&dwc->chan.device_node, &dw->dma.channels); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1609 | |
Viresh Kumar | 93317e8 | 2011-03-03 15:47:22 +0530 | [diff] [blame] | 1610 | /* 7 is highest priority & 0 is lowest. */ |
| 1611 | if (pdata->chan_priority == CHAN_PRIORITY_ASCENDING) |
Andy Shevchenko | 30cb263 | 2015-10-13 20:09:17 +0300 | [diff] [blame] | 1612 | dwc->priority = pdata->nr_channels - i - 1; |
Viresh Kumar | 93317e8 | 2011-03-03 15:47:22 +0530 | [diff] [blame] | 1613 | else |
| 1614 | dwc->priority = i; |
| 1615 | |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1616 | dwc->ch_regs = &__dw_regs(dw)->CHAN[i]; |
| 1617 | spin_lock_init(&dwc->lock); |
| 1618 | dwc->mask = 1 << i; |
| 1619 | |
| 1620 | INIT_LIST_HEAD(&dwc->active_list); |
| 1621 | INIT_LIST_HEAD(&dwc->queue); |
| 1622 | INIT_LIST_HEAD(&dwc->free_list); |
| 1623 | |
| 1624 | channel_clear_bit(dw, CH_EN, dwc->mask); |
Andy Shevchenko | 4a63a8b | 2012-09-21 15:05:47 +0300 | [diff] [blame] | 1625 | |
Andy Shevchenko | 0fdb567 | 2013-01-10 10:53:03 +0200 | [diff] [blame] | 1626 | dwc->direction = DMA_TRANS_NONE; |
Andy Shevchenko | a098200 | 2012-09-21 15:05:48 +0300 | [diff] [blame] | 1627 | |
Andy Shevchenko | 75c6122 | 2013-03-26 16:53:54 +0200 | [diff] [blame] | 1628 | /* Hardware configuration */ |
Andy Shevchenko | fed2574 | 2012-09-21 15:05:49 +0300 | [diff] [blame] | 1629 | if (autocfg) { |
Andy Shevchenko | 6bea0f6 | 2015-09-28 18:57:03 +0300 | [diff] [blame] | 1630 | unsigned int r = DW_DMA_MAX_NR_CHANNELS - i - 1; |
Andy Shevchenko | 897e40d | 2016-03-18 16:24:46 +0200 | [diff] [blame] | 1631 | void __iomem *addr = &__dw_regs(dw)->DWC_PARAMS[r]; |
| 1632 | unsigned int dwc_params = dma_readl_native(addr); |
Andy Shevchenko | fed2574 | 2012-09-21 15:05:49 +0300 | [diff] [blame] | 1633 | |
Andy Shevchenko | 9cade1a | 2013-06-05 15:26:45 +0300 | [diff] [blame] | 1634 | dev_dbg(chip->dev, "DWC_PARAMS[%d]: 0x%08x\n", i, |
| 1635 | dwc_params); |
Andy Shevchenko | 985a6c7 | 2013-01-18 17:10:59 +0200 | [diff] [blame] | 1636 | |
Andy Shevchenko | 1d566f1 | 2014-01-13 14:04:48 +0200 | [diff] [blame] | 1637 | /* |
| 1638 | * Decode maximum block size for given channel. The |
Andy Shevchenko | 4a63a8b | 2012-09-21 15:05:47 +0300 | [diff] [blame] | 1639 | * stored 4 bit value represents blocks from 0x00 for 3 |
Andy Shevchenko | 1d566f1 | 2014-01-13 14:04:48 +0200 | [diff] [blame] | 1640 | * up to 0x0a for 4095. |
| 1641 | */ |
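			/* e.g. nibble 0x0 -> (4 << 0) - 1 = 3, 0xa -> (4 << 10) - 1 = 4095 */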
Andy Shevchenko | 4a63a8b | 2012-09-21 15:05:47 +0300 | [diff] [blame] | 1642 | dwc->block_size = |
| 1643 | (4 << ((max_blk_size >> 4 * i) & 0xf)) - 1; |
Andy Shevchenko | fed2574 | 2012-09-21 15:05:49 +0300 | [diff] [blame] | 1644 | dwc->nollp = |
| 1645 | (dwc_params >> DWC_PARAMS_MBLK_EN & 0x1) == 0; |
| 1646 | } else { |
Andy Shevchenko | 4a63a8b | 2012-09-21 15:05:47 +0300 | [diff] [blame] | 1647 | dwc->block_size = pdata->block_size; |
Andy Shevchenko | fed2574 | 2012-09-21 15:05:49 +0300 | [diff] [blame] | 1648 | |
| 1649 | /* Check if channel supports multi block transfer */ |
Mans Rullgard | 2a0fae0 | 2016-03-18 16:24:44 +0200 | [diff] [blame] | 1650 | channel_writel(dwc, LLP, DWC_LLP_LOC(0xffffffff)); |
| 1651 | dwc->nollp = DWC_LLP_LOC(channel_readl(dwc, LLP)) == 0; |
Andy Shevchenko | fed2574 | 2012-09-21 15:05:49 +0300 | [diff] [blame] | 1652 | channel_writel(dwc, LLP, 0); |
| 1653 | } |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1654 | } |
| 1655 | |
Andy Shevchenko | 11f932e | 2012-06-19 13:34:06 +0300 | [diff] [blame] | 1656 | /* Clear all interrupts on all channels. */ |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1657 | dma_writel(dw, CLEAR.XFER, dw->all_chan_mask); |
Andy Shevchenko | 236b106 | 2012-06-19 13:34:07 +0300 | [diff] [blame] | 1658 | dma_writel(dw, CLEAR.BLOCK, dw->all_chan_mask); |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1659 | dma_writel(dw, CLEAR.SRC_TRAN, dw->all_chan_mask); |
| 1660 | dma_writel(dw, CLEAR.DST_TRAN, dw->all_chan_mask); |
| 1661 | dma_writel(dw, CLEAR.ERROR, dw->all_chan_mask); |
| 1662 | |
Andy Shevchenko | df5c738 | 2015-10-13 20:09:19 +0300 | [diff] [blame] | 1663 | /* Set capabilities */ |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1664 | dma_cap_set(DMA_SLAVE, dw->dma.cap_mask); |
Jamie Iles | 95ea759 | 2011-01-21 14:11:54 +0000 | [diff] [blame] | 1665 | if (pdata->is_private) |
| 1666 | dma_cap_set(DMA_PRIVATE, dw->dma.cap_mask); |
Andy Shevchenko | df5c738 | 2015-10-13 20:09:19 +0300 | [diff] [blame] | 1667 | if (pdata->is_memcpy) |
| 1668 | dma_cap_set(DMA_MEMCPY, dw->dma.cap_mask); |
| 1669 | |
Andy Shevchenko | 9cade1a | 2013-06-05 15:26:45 +0300 | [diff] [blame] | 1670 | dw->dma.dev = chip->dev; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1671 | dw->dma.device_alloc_chan_resources = dwc_alloc_chan_resources; |
| 1672 | dw->dma.device_free_chan_resources = dwc_free_chan_resources; |
| 1673 | |
| 1674 | dw->dma.device_prep_dma_memcpy = dwc_prep_dma_memcpy; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1675 | dw->dma.device_prep_slave_sg = dwc_prep_slave_sg; |
Andy Shevchenko | 029a40e | 2015-01-02 16:17:24 +0200 | [diff] [blame] | 1676 | |
Maxime Ripard | a4b0d34 | 2014-11-17 14:42:12 +0100 | [diff] [blame] | 1677 | dw->dma.device_config = dwc_config; |
| 1678 | dw->dma.device_pause = dwc_pause; |
| 1679 | dw->dma.device_resume = dwc_resume; |
| 1680 | dw->dma.device_terminate_all = dwc_terminate_all; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1681 | |
Linus Walleij | 0793448 | 2010-03-26 16:50:49 -0700 | [diff] [blame] | 1682 | dw->dma.device_tx_status = dwc_tx_status; |
Haavard Skinnemoen | 3bfb1d2 | 2008-07-08 11:59:42 -0700 | [diff] [blame] | 1683 | dw->dma.device_issue_pending = dwc_issue_pending; |
| 1684 | |
Andy Shevchenko | 029a40e | 2015-01-02 16:17:24 +0200 | [diff] [blame] | 1685 | /* DMA capabilities */ |
| 1686 | dw->dma.src_addr_widths = DW_DMA_BUSWIDTHS; |
| 1687 | dw->dma.dst_addr_widths = DW_DMA_BUSWIDTHS; |
| 1688 | dw->dma.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) | |
| 1689 | BIT(DMA_MEM_TO_MEM); |
| 1690 | dw->dma.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; |
| 1691 | |
	err = dma_async_device_register(&dw->dma);
	if (err)
		goto err_dma_register;

	dev_info(chip->dev, "DesignWare DMA Controller, %d channels\n",
		 pdata->nr_channels);

	pm_runtime_put_sync_suspend(chip->dev);

	return 0;

err_dma_register:
	free_irq(chip->irq, dw);
err_pdata:
	pm_runtime_put_sync_suspend(chip->dev);
	return err;
}
EXPORT_SYMBOL_GPL(dw_dma_probe);
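
/*
 * A minimal sketch of how a bus glue driver is expected to use this core
 * (the in-tree users are the platform and PCI glue in this directory; the
 * "mem" resource and "my_pdata" below are illustrative only):
 *
 *	chip = devm_kzalloc(dev, sizeof(*chip), GFP_KERNEL);
 *	if (!chip)
 *		return -ENOMEM;
 *
 *	chip->dev = dev;
 *	chip->irq = irq;
 *	chip->regs = devm_ioremap_resource(dev, mem);
 *	if (IS_ERR(chip->regs))
 *		return PTR_ERR(chip->regs);
 *
 *	err = dw_dma_probe(chip, my_pdata);
 *
 * dw_dma_remove() below undoes this on the glue driver's remove path.
 */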

int dw_dma_remove(struct dw_dma_chip *chip)
{
	struct dw_dma *dw = chip->dw;
	struct dw_dma_chan *dwc, *_dwc;

	pm_runtime_get_sync(chip->dev);

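	/* Quiesce the hardware before the dmaengine device disappears. */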
	dw_dma_off(dw);
	dma_async_device_unregister(&dw->dma);

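	/*
	 * free_irq() guarantees the handler has finished, so the tasklet
	 * can no longer be scheduled once it is killed.
	 */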
	free_irq(chip->irq, dw);
	tasklet_kill(&dw->tasklet);

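	/* Drop every channel from the dmaengine list and force it off. */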
	list_for_each_entry_safe(dwc, _dwc, &dw->dma.channels,
			chan.device_node) {
		list_del(&dwc->chan.device_node);
		channel_clear_bit(dw, CH_EN, dwc->mask);
	}

	pm_runtime_put_sync_suspend(chip->dev);
	return 0;
}
EXPORT_SYMBOL_GPL(dw_dma_remove);

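/*
 * dw_dma_disable() and dw_dma_enable() are exported for the glue drivers'
 * system sleep callbacks, which switch the controller off on suspend and
 * back on on resume without unregistering the dmaengine device.
 */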
int dw_dma_disable(struct dw_dma_chip *chip)
{
	struct dw_dma *dw = chip->dw;

	dw_dma_off(dw);
	return 0;
}
EXPORT_SYMBOL_GPL(dw_dma_disable);

int dw_dma_enable(struct dw_dma_chip *chip)
{
	struct dw_dma *dw = chip->dw;

	dw_dma_on(dw);
	return 0;
}
EXPORT_SYMBOL_GPL(dw_dma_enable);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("Synopsys DesignWare DMA Controller core driver");
MODULE_AUTHOR("Haavard Skinnemoen (Atmel)");
MODULE_AUTHOR("Viresh Kumar <vireshk@kernel.org>");