blob: 88965597b7d08c9e2b9987957f85284cc9851f4c [file] [log] [blame]
/*
 * Driver for the Synopsys DesignWare AHB DMA Controller
 *
 * Copyright (C) 2005-2007 Atmel Corporation
 * Copyright (C) 2010-2011 ST Microelectronics
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
11
12#include <linux/dw_dmac.h>
13
/* Upper bound on channels; sizes the per-channel register/parameter arrays */
#define DW_DMA_MAX_NR_CHANNELS	8
15
/*
 * Flow controller / transfer-type encodings, programmed into CTL_LO via
 * DWC_CTLL_FC().
 *
 * Naming: DW_DMA_FC_<flow controller>_<direction>, where the flow
 * controller is D (DMAC), P (peripheral), SP (source peripheral) or
 * DP (destination peripheral).
 *
 * The numeric values are part of the hardware encoding; they are made
 * explicit so an accidental reorder of the enumerators cannot silently
 * change what gets written to the control register.
 */
enum dw_dma_fc {
	DW_DMA_FC_D_M2M		= 0,	/* DMAC, mem-to-mem */
	DW_DMA_FC_D_M2P		= 1,	/* DMAC, mem-to-periph */
	DW_DMA_FC_D_P2M		= 2,	/* DMAC, periph-to-mem */
	DW_DMA_FC_D_P2P		= 3,	/* DMAC, periph-to-periph */
	DW_DMA_FC_P_P2M		= 4,	/* peripheral, periph-to-mem */
	DW_DMA_FC_SP_P2P	= 5,	/* src peripheral, periph-to-periph */
	DW_DMA_FC_P_M2P		= 6,	/* peripheral, mem-to-periph */
	DW_DMA_FC_DP_P2P	= 7,	/* dst peripheral, periph-to-periph */
};
27
/*
 * Redefine this macro to handle differences between 32- and 64-bit
 * addressing, big vs. little endian, etc.
 *
 * As defined here it declares a 32-bit register followed by 32 bits of
 * padding, so every DW_REG slot occupies 8 bytes in the register maps.
 */
#define DW_REG(name)		u32 name; u32 __pad_##name
33
/* Hardware register definitions. */

/*
 * Per-channel register block; one instance per DMA channel in the
 * controller's register map.  Each DW_REG slot is 8 bytes, so the
 * whole block spans 0x58 bytes.
 */
struct dw_dma_chan_regs {
	DW_REG(SAR);		/* Source Address Register */
	DW_REG(DAR);		/* Destination Address Register */
	DW_REG(LLP);		/* Linked List Pointer */
	u32	CTL_LO;		/* Control Register Low */
	u32	CTL_HI;		/* Control Register High */
	DW_REG(SSTAT);		/* Source Status */
	DW_REG(DSTAT);		/* Destination Status */
	DW_REG(SSTATAR);	/* NOTE(review): presumably the SSTAT fetch address -- confirm vs databook */
	DW_REG(DSTATAR);	/* NOTE(review): presumably the DSTAT fetch address -- confirm vs databook */
	u32	CFG_LO;		/* Configuration Register Low */
	u32	CFG_HI;		/* Configuration Register High */
	DW_REG(SGR);		/* Source Gather Register */
	DW_REG(DSR);		/* Destination Scatter Register */
};
50
/*
 * Interrupt register bank layout, instantiated four times below for the
 * RAW, STATUS, MASK and CLEAR banks: one register slot per interrupt
 * type.
 */
struct dw_dma_irq_regs {
	DW_REG(XFER);		/* transfer complete */
	DW_REG(BLOCK);		/* block complete */
	DW_REG(SRC_TRAN);	/* source transaction complete */
	DW_REG(DST_TRAN);	/* destination transaction complete */
	DW_REG(ERROR);		/* transfer error */
};
58
/*
 * Full register map of the controller, starting at the ioremapped base.
 * Offsets in the comments follow from the layout (DW_REG = 8 bytes,
 * dw_dma_chan_regs = 0x58 bytes, dw_dma_irq_regs = 0x28 bytes).
 */
struct dw_dma_regs {
	/* per-channel registers, 0x000..0x2bf */
	struct dw_dma_chan_regs	CHAN[DW_DMA_MAX_NR_CHANNELS];

	/* irq handling, 0x2c0..0x35f */
	struct dw_dma_irq_regs	RAW;		/* r */
	struct dw_dma_irq_regs	STATUS;		/* r (raw & mask) */
	struct dw_dma_irq_regs	MASK;		/* rw (set = irq enabled) */
	struct dw_dma_irq_regs	CLEAR;		/* w (ack, affects "raw") */

	DW_REG(STATUS_INT);			/* r, 0x360 */

	/* software handshaking, 0x368..0x397 */
	DW_REG(REQ_SRC);
	DW_REG(REQ_DST);
	DW_REG(SGL_REQ_SRC);
	DW_REG(SGL_REQ_DST);
	DW_REG(LAST_SRC);
	DW_REG(LAST_DST);

	/* miscellaneous, 0x398..0x3b7 */
	DW_REG(CFG);
	DW_REG(CH_EN);
	DW_REG(ID);
	DW_REG(TEST);

	/* reserved, 0x3b8..0x3c7 */
	DW_REG(__reserved0);
	DW_REG(__reserved1);

	/* optional encoded params, 0x3c8..0x3f7 */
	u32	__reserved;

	/* per-channel configuration registers */
	u32	DWC_PARAMS[DW_DMA_MAX_NR_CHANNELS];
	u32	MULTI_BLK_TYPE;
	u32	MAX_BLK_SIZE;

	/* top-level parameters */
	u32	DW_PARAMS;
};
100
/*
 * Low-level register accessor primitives.  When the controller's
 * registers are wired big-endian (CONFIG_DW_DMAC_BIG_ENDIAN_IO), use
 * the byte-swapping ioread32be/iowrite32be; otherwise plain
 * readl/writel.
 */
#ifdef CONFIG_DW_DMAC_BIG_ENDIAN_IO
#define dma_readl_native ioread32be
#define dma_writel_native iowrite32be
#else
#define dma_readl_native readl
#define dma_writel_native writel
#endif
108
/*
 * To access the registers in early stage of probe: read register 'name'
 * relative to a raw base address, before the driver state holding the
 * mapped register pointer has been set up.
 */
#define dma_read_byaddr(addr, name) \
	dma_readl_native((addr) + offsetof(struct dw_dma_regs, name))
Andy Shevchenko2a9fe9a2012-09-21 15:05:45 +0300112
/*
 * Bitfields in DW_PARAMS.  These are bit *positions* (shift amounts)
 * within the encoded hardware-parameter register, not masks.
 */
#define DW_PARAMS_NR_CHAN	8		/* number of channels */
#define DW_PARAMS_NR_MASTER	11		/* number of AHB masters */
#define DW_PARAMS_DATA_WIDTH(n)	(15 + 2 * (n))	/* data width of master n (2 bits each) */
#define DW_PARAMS_DATA_WIDTH1	15		/* master 1 data width */
#define DW_PARAMS_DATA_WIDTH2	17		/* master 2 data width */
#define DW_PARAMS_DATA_WIDTH3	19		/* master 3 data width */
#define DW_PARAMS_DATA_WIDTH4	21		/* master 4 data width */
#define DW_PARAMS_EN		28		/* encoded parameters */

/* Bitfields in DWC_PARAMS (bit positions, per-channel) */
#define DWC_PARAMS_MBLK_EN	11		/* multi block transfer */
125
/*
 * Bitfields in CTL_LO (channel control register, low word).
 *
 * The DST address-update field occupies bits 8:7 and the SRC
 * address-update field bits 10:9.  DWC_CTLL_SRC_INC is written as
 * (0 << 9): it used to read (0 << 7), which is numerically identical
 * but misleading about which field the constant belongs to.
 */
#define DWC_CTLL_INT_EN		(1 << 0)	/* irqs enabled? */
#define DWC_CTLL_DST_WIDTH(n)	((n)<<1)	/* bytes per element */
#define DWC_CTLL_SRC_WIDTH(n)	((n)<<4)
#define DWC_CTLL_DST_INC	(0<<7)		/* DAR update/not */
#define DWC_CTLL_DST_DEC	(1<<7)
#define DWC_CTLL_DST_FIX	(2<<7)
#define DWC_CTLL_SRC_INC	(0<<9)		/* SAR update/not */
#define DWC_CTLL_SRC_DEC	(1<<9)
#define DWC_CTLL_SRC_FIX	(2<<9)
#define DWC_CTLL_DST_MSIZE(n)	((n)<<11)	/* burst, #elements */
#define DWC_CTLL_SRC_MSIZE(n)	((n)<<14)
#define DWC_CTLL_S_GATH_EN	(1 << 17)	/* src gather, !FIX */
#define DWC_CTLL_D_SCAT_EN	(1 << 18)	/* dst scatter, !FIX */
#define DWC_CTLL_FC(n)		((n) << 20)	/* flow controller, see enum dw_dma_fc */
#define DWC_CTLL_FC_M2M		(0 << 20)	/* mem-to-mem */
#define DWC_CTLL_FC_M2P		(1 << 20)	/* mem-to-periph */
#define DWC_CTLL_FC_P2M		(2 << 20)	/* periph-to-mem */
#define DWC_CTLL_FC_P2P		(3 << 20)	/* periph-to-periph */
/* plus 4 transfer types for peripheral-as-flow-controller */
#define DWC_CTLL_DMS(n)		((n)<<23)	/* dst master select */
#define DWC_CTLL_SMS(n)		((n)<<25)	/* src master select */
#define DWC_CTLL_LLP_D_EN	(1 << 27)	/* dest block chain */
#define DWC_CTLL_LLP_S_EN	(1 << 28)	/* src block chain */

/* Bitfields in CTL_HI */
#define DWC_CTLH_DONE		0x00001000	/* block transfer done */
#define DWC_CTLH_BLOCK_TS_MASK	0x00000fff	/* block transfer size */
154
/* Bitfields in CFG_LO. Platform-configurable bits are in <linux/dw_dmac.h> */
#define DWC_CFGL_CH_PRIOR_MASK	(0x7 << 5)	/* priority mask */
#define DWC_CFGL_CH_PRIOR(x)	((x) << 5)	/* priority */
#define DWC_CFGL_CH_SUSP	(1 << 8)	/* pause xfer */
#define DWC_CFGL_FIFO_EMPTY	(1 << 9)	/* pause xfer */
#define DWC_CFGL_HS_DST		(1 << 10)	/* handshake w/dst */
#define DWC_CFGL_HS_SRC		(1 << 11)	/* handshake w/src */
#define DWC_CFGL_MAX_BURST(x)	((x) << 20)
#define DWC_CFGL_RELOAD_SAR	(1 << 30)
/* 1U: left-shifting 1 into the sign bit of an int is undefined behaviour */
#define DWC_CFGL_RELOAD_DAR	(1U << 31)
165
/* Bitfields in CFG_HI. Platform-configurable bits are in <linux/dw_dmac.h> */
#define DWC_CFGH_DS_UPD_EN	(1 << 5)	/* DSTAT update enable -- presumably; confirm vs databook */
#define DWC_CFGH_SS_UPD_EN	(1 << 6)	/* SSTAT update enable -- presumably; confirm vs databook */

/* Bitfields in SGR (source gather register) */
#define DWC_SGR_SGI(x)		((x) << 0)
#define DWC_SGR_SGC(x)		((x) << 20)

/* Bitfields in DSR (destination scatter register) */
#define DWC_DSR_DSI(x)		((x) << 0)
#define DWC_DSR_DSC(x)		((x) << 20)

/* Bitfields in CFG (global configuration register) */
#define DW_CFG_DMA_EN		(1 << 0)	/* global controller enable */
180
/* Bit numbers for the dw_dma_chan.flags word */
enum dw_dmac_flags {
	DW_DMA_IS_CYCLIC = 0,		/* channel runs a cyclic (dw_cyclic_desc) transfer */
	DW_DMA_IS_SOFT_LLP = 1,		/* LLP transfer emulated in software */
};
185
/* Driver state of a single DMA channel */
struct dw_dma_chan {
	struct dma_chan		chan;		/* generic dmaengine channel */
	void __iomem		*ch_regs;	/* this channel's register block */
	u8			mask;		/* this channel's bit mask */
	u8			priority;	/* channel priority */
	bool			paused;		/* transfer currently suspended? */
	bool			initialized;	/* one-time channel setup done? */

	/* software emulation of the LLP transfers */
	struct list_head	*tx_list;
	struct list_head	*tx_node_active;

	spinlock_t		lock;

	/* these other elements are all protected by lock */
	unsigned long		flags;		/* enum dw_dmac_flags bits */
	struct list_head	active_list;
	struct list_head	queue;
	struct list_head	free_list;
	struct dw_cyclic_desc	*cdesc;		/* active cyclic transfer, if any */

	unsigned int		descs_allocated;

	/* hardware configuration */
	unsigned int		block_size;	/* max block size for this channel */
	bool			nollp;		/* no hardware LLP (block chaining) support */

	/* configuration passed via DMA_SLAVE_CONFIG */
	struct dma_slave_config	dma_sconfig;

	/* backlink to dw_dma */
	struct dw_dma		*dw;
};
219
/* Map a dw_dma_chan to its memory-mapped channel register block */
static inline struct dw_dma_chan_regs __iomem *
__dwc_regs(struct dw_dma_chan *dwc)
{
	return dwc->ch_regs;
}

/* Read/write a named per-channel register, honouring I/O endianness */
#define channel_readl(dwc, name) \
	dma_readl_native(&(__dwc_regs(dwc)->name))
#define channel_writel(dwc, name, val) \
	dma_writel_native((val), &(__dwc_regs(dwc)->name))
Haavard Skinnemoen3bfb1d22008-07-08 11:59:42 -0700230
231static inline struct dw_dma_chan *to_dw_dma_chan(struct dma_chan *chan)
232{
233 return container_of(chan, struct dw_dma_chan, chan);
234}
235
Haavard Skinnemoen3bfb1d22008-07-08 11:59:42 -0700236struct dw_dma {
237 struct dma_device dma;
238 void __iomem *regs;
239 struct tasklet_struct tasklet;
240 struct clk *clk;
241
242 u8 all_chan_mask;
243
Andy Shevchenkoa0982002012-09-21 15:05:48 +0300244 /* hardware configuration */
245 unsigned char nr_masters;
246 unsigned char data_width[4];
247
Haavard Skinnemoen3bfb1d22008-07-08 11:59:42 -0700248 struct dw_dma_chan chan[0];
249};
250
/* Map a dw_dma to the controller's memory-mapped register file */
static inline struct dw_dma_regs __iomem *__dw_regs(struct dw_dma *dw)
{
	return dw->regs;
}

/* Read/write a named controller register, honouring I/O endianness */
#define dma_readl(dw, name) \
	dma_readl_native(&(__dw_regs(dw)->name))
#define dma_writel(dw, name, val) \
	dma_writel_native((val), &(__dw_regs(dw)->name))
Haavard Skinnemoen3bfb1d22008-07-08 11:59:42 -0700260
/*
 * Set/clear per-channel bits in registers such as CH_EN.  The value
 * written pairs each channel bit with a bit 8 positions higher; that
 * upper byte appears to act as a per-bit write enable, so only the
 * channels named in 'mask' are touched and no read-modify-write is
 * needed -- confirm against the DesignWare databook.
 */
#define channel_set_bit(dw, reg, mask) \
	dma_writel(dw, reg, ((mask) << 8) | (mask))
#define channel_clear_bit(dw, reg, mask) \
	dma_writel(dw, reg, ((mask) << 8) | 0)
265
266static inline struct dw_dma *to_dw_dma(struct dma_device *ddev)
267{
268 return container_of(ddev, struct dw_dma, dma);
269}
270
/*
 * LLI == Linked List Item; a.k.a. DMA block descriptor.
 * This layout is read directly by the hardware -- do not reorder.
 */
struct dw_lli {
	/* values that are not changed by hardware */
	u32		sar;	/* source address */
	u32		dar;	/* destination address */
	u32		llp;	/* chain to next lli */
	u32		ctllo;	/* CTL_LO image for this block */
	/* values that may get written back: */
	u32		ctlhi;	/* CTL_HI image for this block */
	/* sstat and dstat can snapshot peripheral register state.
	 * silicon config may discard either or both...
	 */
	u32		sstat;
	u32		dstat;
};
286
/* Software descriptor wrapping one hardware LLI */
struct dw_desc {
	/* FIRST values the hardware uses -- lli must stay the first member */
	struct dw_lli			lli;

	/* THEN values for driver housekeeping */
	struct list_head		desc_node;	/* link in the channel's lists */
	struct list_head		tx_list;	/* NOTE(review): presumably child descriptors of this transfer -- confirm in dw_dmac.c */
	struct dma_async_tx_descriptor	txd;		/* dmaengine cookie/callback bookkeeping */
	size_t				len;		/* transfer length -- assumed bytes, confirm */
};
297
298static inline struct dw_desc *
299txd_to_dw_desc(struct dma_async_tx_descriptor *txd)
300{
301 return container_of(txd, struct dw_desc, txd);
302}