/*
 * include/asm-ppc/ppc4xx_dma.h
 *
 * IBM PPC4xx DMA engine library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up a bit more, Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#ifdef __KERNEL__
#ifndef __ASMPPC_PPC4xx_DMA_H
#define __ASMPPC_PPC4xx_DMA_H

#include <linux/types.h>
#include <asm/mmu.h>
#include <asm/ibm4xx.h>

#undef DEBUG_4xxDMA

#define MAX_PPC4xx_DMA_CHANNELS	4

/*
 * Function return status codes
 * These values indicate whether the function call succeeded or
 * a bad/invalid parameter was passed.
 */
#define DMA_STATUS_GOOD			0
#define DMA_STATUS_BAD_CHANNEL		1
#define DMA_STATUS_BAD_HANDLE		2
#define DMA_STATUS_BAD_MODE		3
#define DMA_STATUS_NULL_POINTER		4
#define DMA_STATUS_OUT_OF_MEMORY	5
#define DMA_STATUS_SGL_LIST_EMPTY	6
#define DMA_STATUS_GENERAL_ERROR	7
#define DMA_STATUS_CHANNEL_NOTFREE	8

#define DMA_CHANNEL_BUSY		0x80000000

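/*
 * Illustrative sketch only (assumed usage, not taken from the original
 * documentation): callers of the library routines declared at the end of
 * this header compare the return value against the codes above; 'dmanr'
 * and 'p_init' stand for a channel number and a ppc_dma_ch_t configuration
 * defined further down, e.g.
 *
 *	if (ppc4xx_init_dma_channel(dmanr, &p_init) != DMA_STATUS_GOOD)
 *		return -EINVAL;
 */
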
/*
 * These indicate status as returned from the DMA Status Register.
 */
#define DMA_STATUS_NO_ERROR	0
#define DMA_STATUS_CS		1	/* Count Status */
#define DMA_STATUS_TS		2	/* Transfer Status */
#define DMA_STATUS_DMA_ERROR	3	/* DMA Error Occurred */
#define DMA_STATUS_DMA_BUSY	4	/* The channel is busy */


/*
 * DMA Channel Control Registers
 */

#ifdef CONFIG_44x
#define PPC4xx_DMA_64BIT
#define DMA_CR_OFFSET 1
#else
#define DMA_CR_OFFSET 0
#endif

#define DMA_CE_ENABLE		(1<<31)	/* DMA Channel Enable */
#define SET_DMA_CE_ENABLE(x)	(((x)&0x1)<<31)
#define GET_DMA_CE_ENABLE(x)	(((x)&DMA_CE_ENABLE)>>31)

#define DMA_CIE_ENABLE		(1<<30)	/* DMA Channel Interrupt Enable */
#define SET_DMA_CIE_ENABLE(x)	(((x)&0x1)<<30)
#define GET_DMA_CIE_ENABLE(x)	(((x)&DMA_CIE_ENABLE)>>30)

#define DMA_TD			(1<<29)
#define SET_DMA_TD(x)		(((x)&0x1)<<29)
#define GET_DMA_TD(x)		(((x)&DMA_TD)>>29)

#define DMA_PL			(1<<28)	/* Peripheral Location */
#define SET_DMA_PL(x)		(((x)&0x1)<<28)
#define GET_DMA_PL(x)		(((x)&DMA_PL)>>28)

#define EXTERNAL_PERIPHERAL	0
#define INTERNAL_PERIPHERAL	1

#define SET_DMA_PW(x)		(((x)&0x3)<<(26-DMA_CR_OFFSET))	/* Peripheral Width */
#define DMA_PW_MASK		SET_DMA_PW(3)
#define PW_8			0
#define PW_16			1
#define PW_32			2
#define PW_64			3
/* FIXME: Add PW_128 support for 440GP DMA block */
#define GET_DMA_PW(x)		(((x)&DMA_PW_MASK)>>(26-DMA_CR_OFFSET))

#define DMA_DAI			(1<<(25-DMA_CR_OFFSET))	/* Destination Address Increment */
#define SET_DMA_DAI(x)		(((x)&0x1)<<(25-DMA_CR_OFFSET))

#define DMA_SAI			(1<<(24-DMA_CR_OFFSET))	/* Source Address Increment */
#define SET_DMA_SAI(x)		(((x)&0x1)<<(24-DMA_CR_OFFSET))

#define DMA_BEN			(1<<(23-DMA_CR_OFFSET))	/* Buffer Enable */
#define SET_DMA_BEN(x)		(((x)&0x1)<<(23-DMA_CR_OFFSET))

#define SET_DMA_TM(x)		(((x)&0x3)<<(21-DMA_CR_OFFSET))	/* Transfer Mode */
#define DMA_TM_MASK		SET_DMA_TM(3)
#define TM_PERIPHERAL		0	/* Peripheral */
#define TM_RESERVED		1	/* Reserved */
#define TM_S_MM			2	/* Memory to Memory */
#define TM_D_MM			3	/* Device Paced Memory to Memory */
#define GET_DMA_TM(x)		(((x)&DMA_TM_MASK)>>(21-DMA_CR_OFFSET))

#define SET_DMA_PSC(x)		(((x)&0x3)<<(19-DMA_CR_OFFSET))	/* Peripheral Setup Cycles */
#define DMA_PSC_MASK		SET_DMA_PSC(3)
#define GET_DMA_PSC(x)		(((x)&DMA_PSC_MASK)>>(19-DMA_CR_OFFSET))

#define SET_DMA_PWC(x)		(((x)&0x3F)<<(13-DMA_CR_OFFSET))	/* Peripheral Wait Cycles */
#define DMA_PWC_MASK		SET_DMA_PWC(0x3F)
#define GET_DMA_PWC(x)		(((x)&DMA_PWC_MASK)>>(13-DMA_CR_OFFSET))

#define SET_DMA_PHC(x)		(((x)&0x7)<<(10-DMA_CR_OFFSET))	/* Peripheral Hold Cycles */
#define DMA_PHC_MASK		SET_DMA_PHC(0x7)
#define GET_DMA_PHC(x)		(((x)&DMA_PHC_MASK)>>(10-DMA_CR_OFFSET))

#define DMA_ETD_OUTPUT		(1<<(9-DMA_CR_OFFSET))	/* EOT pin is a TC output */
#define SET_DMA_ETD(x)		(((x)&0x1)<<(9-DMA_CR_OFFSET))

#define DMA_TCE_ENABLE		(1<<(8-DMA_CR_OFFSET))
#define SET_DMA_TCE(x)		(((x)&0x1)<<(8-DMA_CR_OFFSET))

#define DMA_DEC			(1<<(2))	/* Address Decrement */
#define SET_DMA_DEC(x)		(((x)&0x1)<<2)
#define GET_DMA_DEC(x)		(((x)&DMA_DEC)>>2)

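/*
 * Illustrative sketch only (an editorial note, not original documentation):
 * the SET_DMA_*() macros shift a field value into its bit position so it
 * can be OR'd into a channel control word, and the matching GET_DMA_*()
 * macros extract it again, e.g.
 *
 *	uint32_t cr = SET_DMA_CE_ENABLE(1) | SET_DMA_PW(PW_32) | SET_DMA_SAI(1);
 *	unsigned int width = GET_DMA_PW(cr);	(width is then PW_32)
 */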

/*
 * Transfer Modes
 * These modes are defined in a way that makes it possible to
 * simply "or" the value into the control register.
 */

#define DMA_MODE_MM		(SET_DMA_TM(TM_S_MM))	/* memory to memory */

				/* Device-paced memory to memory, */
				/* device is at source address */
#define DMA_MODE_MM_DEVATSRC	(DMA_TD | SET_DMA_TM(TM_D_MM))

				/* Device-paced memory to memory, */
				/* device is at destination address */
#define DMA_MODE_MM_DEVATDST	(SET_DMA_TM(TM_D_MM))

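/*
 * Illustrative sketch only (assumed usage): a plain memory-to-memory
 * transfer would typically select its mode through ppc4xx_set_dma_mode(),
 * declared at the end of this header, e.g.
 *
 *	ppc4xx_set_dma_mode(dmanr, DMA_MODE_MM);
 */
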
/* 405gp/440gp */
#define SET_DMA_PREFETCH(x)	(((x)&0x3)<<(4-DMA_CR_OFFSET))	/* Memory Read Prefetch */
#define DMA_PREFETCH_MASK	SET_DMA_PREFETCH(3)
#define PREFETCH_1		0	/* Prefetch 1 Double Word */
#define PREFETCH_2		1
#define PREFETCH_4		2
#define GET_DMA_PREFETCH(x)	(((x)&DMA_PREFETCH_MASK)>>(4-DMA_CR_OFFSET))

#define DMA_PCE			(1<<(3-DMA_CR_OFFSET))	/* Parity Check Enable */
#define SET_DMA_PCE(x)		(((x)&0x1)<<(3-DMA_CR_OFFSET))
#define GET_DMA_PCE(x)		(((x)&DMA_PCE)>>(3-DMA_CR_OFFSET))

/* stb3x */

#define DMA_ECE_ENABLE		(1<<5)
#define SET_DMA_ECE(x)		(((x)&0x1)<<5)
#define GET_DMA_ECE(x)		(((x)&DMA_ECE_ENABLE)>>5)

#define DMA_TCD_DISABLE		(1<<4)
#define SET_DMA_TCD(x)		(((x)&0x1)<<4)
#define GET_DMA_TCD(x)		(((x)&DMA_TCD_DISABLE)>>4)

typedef uint32_t sgl_handle_t;

#ifdef CONFIG_PPC4xx_EDMA

#define SGL_LIST_SIZE		4096
#define DMA_PPC4xx_SIZE		SGL_LIST_SIZE

#define SET_DMA_PRIORITY(x)	(((x)&0x3)<<(6-DMA_CR_OFFSET))	/* DMA Channel Priority */
#define DMA_PRIORITY_MASK	SET_DMA_PRIORITY(3)
#define PRIORITY_LOW		0
#define PRIORITY_MID_LOW	1
#define PRIORITY_MID_HIGH	2
#define PRIORITY_HIGH		3
#define GET_DMA_PRIORITY(x)	(((x)&DMA_PRIORITY_MASK)>>(6-DMA_CR_OFFSET))
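
/*
 * Illustrative sketch only (assumed usage): channel priority is fed in
 * through the 'cp' field consumed by SET_DMA_CONTROL below, and presumably
 * can also be changed at run time with ppc4xx_set_channel_priority(),
 * declared at the end of this header, e.g.
 *
 *	ppc4xx_set_channel_priority(dmanr, PRIORITY_HIGH);
 */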

/*
 * DMA Polarity Configuration Register
 */
#define DMAReq_ActiveLow(chan)	(1<<(31-(chan*3)))
#define DMAAck_ActiveLow(chan)	(1<<(30-(chan*3)))
#define EOT_ActiveLow(chan)	(1<<(29-(chan*3)))	/* End of Transfer */

/*
 * DMA Sleep Mode Register
 */
#define SLEEP_MODE_ENABLE	(1<<21)

/*
 * DMA Status Register
 */
#define DMA_CS0		(1<<31)	/* Terminal Count has been reached */
#define DMA_CS1		(1<<30)
#define DMA_CS2		(1<<29)
#define DMA_CS3		(1<<28)

#define DMA_TS0		(1<<27)	/* End of Transfer has been requested */
#define DMA_TS1		(1<<26)
#define DMA_TS2		(1<<25)
#define DMA_TS3		(1<<24)

#define DMA_CH0_ERR	(1<<23)	/* DMA Channel 0 Error */
#define DMA_CH1_ERR	(1<<22)
#define DMA_CH2_ERR	(1<<21)
#define DMA_CH3_ERR	(1<<20)

#define DMA_IN_DMA_REQ0	(1<<19)	/* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1	(1<<18)
#define DMA_IN_DMA_REQ2	(1<<17)
#define DMA_IN_DMA_REQ3	(1<<16)

#define DMA_EXT_DMA_REQ0	(1<<15)	/* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1	(1<<14)
#define DMA_EXT_DMA_REQ2	(1<<13)
#define DMA_EXT_DMA_REQ3	(1<<12)

#define DMA_CH0_BUSY	(1<<11)	/* DMA Channel 0 Busy */
#define DMA_CH1_BUSY	(1<<10)
#define DMA_CH2_BUSY	(1<<9)
#define DMA_CH3_BUSY	(1<<8)

#define DMA_SG0		(1<<7)	/* DMA Channel 0 Scatter/Gather in progress */
#define DMA_SG1		(1<<6)
#define DMA_SG2		(1<<5)
#define DMA_SG3		(1<<4)

/* DMA Channel Count Register */
#define DMA_CTC_BTEN		(1<<23)	/* Burst Enable/Disable bit */
#define DMA_CTC_BSIZ_MSK	(3<<21)	/* Mask of the Burst size bits */
#define DMA_CTC_BSIZ_2		(0)
#define DMA_CTC_BSIZ_4		(1<<21)
#define DMA_CTC_BSIZ_8		(2<<21)
#define DMA_CTC_BSIZ_16		(3<<21)

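/*
 * Illustrative sketch only (assumption: ppc4xx_set_burst_size() takes one
 * of the DMA_CTC_BSIZ_* encodings above -- check ppc4xx_dma.c before
 * relying on this):
 *
 *	ppc4xx_enable_burst(dmanr);
 *	ppc4xx_set_burst_size(dmanr, DMA_CTC_BSIZ_16);
 */
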
/*
 * DMA SG Command Register
 */
#define SSG_ENABLE(chan)	(1<<(31-chan))	/* Start Scatter Gather */
#define SSG_MASK_ENABLE(chan)	(1<<(15-chan))	/* Enable writing to SSG0 bit */

/*
 * DMA Scatter/Gather Descriptor Bit fields
 */
#define SG_LINK			(1<<31)	/* Link */
#define SG_TCI_ENABLE		(1<<29)	/* Enable Terminal Count Interrupt */
#define SG_ETI_ENABLE		(1<<28)	/* Enable End of Transfer Interrupt */
#define SG_ERI_ENABLE		(1<<27)	/* Enable Error Interrupt */
#define SG_COUNT_MASK		0xFFFF	/* Count Field */

#define SET_DMA_CONTROL \
	(SET_DMA_CIE_ENABLE(p_init->int_enable) |	/* interrupt enable */ \
	 SET_DMA_BEN(p_init->buffer_enable) |		/* buffer enable */ \
	 SET_DMA_ETD(p_init->etd_output) |		/* end of transfer pin */ \
	 SET_DMA_TCE(p_init->tce_enable) |		/* terminal count enable */ \
	 SET_DMA_PL(p_init->pl) |			/* peripheral location */ \
	 SET_DMA_DAI(p_init->dai) |			/* dest addr increment */ \
	 SET_DMA_SAI(p_init->sai) |			/* src addr increment */ \
	 SET_DMA_PRIORITY(p_init->cp) |			/* channel priority */ \
	 SET_DMA_PW(p_init->pwidth) |			/* peripheral/bus width */ \
	 SET_DMA_PSC(p_init->psc) |			/* peripheral setup cycles */ \
	 SET_DMA_PWC(p_init->pwc) |			/* peripheral wait cycles */ \
	 SET_DMA_PHC(p_init->phc) |			/* peripheral hold cycles */ \
	 SET_DMA_PREFETCH(p_init->pf)			/* read prefetch */)

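/*
 * Editorial note: SET_DMA_CONTROL expands in place and expects a local
 * 'p_init' pointer to be in scope at the expansion site; its field names
 * match the ppc_dma_ch_t structure defined later in this file, which is
 * presumably how the library sources use it.
 */
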
#define GET_DMA_POLARITY(chan)	(DMAReq_ActiveLow(chan) | DMAAck_ActiveLow(chan) | EOT_ActiveLow(chan))

#elif defined(CONFIG_STB03xxx)		/* stb03xxx */

#define DMA_PPC4xx_SIZE	4096

/*
 * DMA Status Register
 */

#define SET_DMA_PRIORITY(x)	(((x)&0x00800001))	/* DMA Channel Priority */
#define DMA_PRIORITY_MASK	0x00800001
#define PRIORITY_LOW		0x00000000
#define PRIORITY_MID_LOW	0x00000001
#define PRIORITY_MID_HIGH	0x00800000
#define PRIORITY_HIGH		0x00800001
#define GET_DMA_PRIORITY(x)	(((((x)&DMA_PRIORITY_MASK) & 0x00800000) >> 22) | (((x)&DMA_PRIORITY_MASK) & 0x00000001))

#define DMA_CS0		(1<<31)	/* Terminal Count has been reached */
#define DMA_CS1		(1<<30)
#define DMA_CS2		(1<<29)
#define DMA_CS3		(1<<28)

#define DMA_TS0		(1<<27)	/* End of Transfer has been requested */
#define DMA_TS1		(1<<26)
#define DMA_TS2		(1<<25)
#define DMA_TS3		(1<<24)

#define DMA_CH0_ERR	(1<<23)	/* DMA Channel 0 Error */
#define DMA_CH1_ERR	(1<<22)
#define DMA_CH2_ERR	(1<<21)
#define DMA_CH3_ERR	(1<<20)

#define DMA_CT0		(1<<19)	/* Chained transfer */

#define DMA_IN_DMA_REQ0	(1<<18)	/* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1	(1<<17)
#define DMA_IN_DMA_REQ2	(1<<16)
#define DMA_IN_DMA_REQ3	(1<<15)

#define DMA_EXT_DMA_REQ0	(1<<14)	/* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1	(1<<13)
#define DMA_EXT_DMA_REQ2	(1<<12)
#define DMA_EXT_DMA_REQ3	(1<<11)

#define DMA_CH0_BUSY	(1<<10)	/* DMA Channel 0 Busy */
#define DMA_CH1_BUSY	(1<<9)
#define DMA_CH2_BUSY	(1<<8)
#define DMA_CH3_BUSY	(1<<7)

#define DMA_CT1		(1<<6)	/* Chained transfer */
#define DMA_CT2		(1<<5)
#define DMA_CT3		(1<<4)

#define DMA_CH_ENABLE	(1<<7)
#define SET_DMA_CH(x)	(((x)&0x1)<<7)
#define GET_DMA_CH(x)	(((x)&DMA_CH_ENABLE)>>7)

/* STBx25xxx DMA unique */
/* Device port numbers used to enable a device port on a DMA channel
 * (for example, EXT_DMA_0 on DMA channel 1); see the illustrative
 * sketch after the DMA_CHAN_* defines below.
 */

#define SSP0_RECV	15
#define SSP0_XMIT	14
#define EXT_DMA_0	12
#define SC1_XMIT	11
#define SC1_RECV	10
#define EXT_DMA_2	9
#define EXT_DMA_3	8
#define SERIAL2_XMIT	7
#define SERIAL2_RECV	6
#define SC0_XMIT	5
#define SC0_RECV	4
#define SERIAL1_XMIT	3
#define SERIAL1_RECV	2
#define SERIAL0_XMIT	1
#define SERIAL0_RECV	0

#define DMA_CHAN_0	1
#define DMA_CHAN_1	2
#define DMA_CHAN_2	3
#define DMA_CHAN_3	4
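
/*
 * Illustrative sketch only (argument meanings are assumed from the names
 * and from the "ext 0 on dma 1" example above; check ppc4xx_dma.c before
 * relying on the exact argument order):
 *
 *	ppc4xx_map_dma_port(dmanr, EXT_DMA_0, DMA_CHAN_1);
 */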

/* end STBx25xx */

/*
 * Bit 30 must be one for Redwoods, otherwise transfers may receive errors.
 */
#define DMA_CR_MB0	0x2

#define SET_DMA_CONTROL \
	(SET_DMA_CIE_ENABLE(p_init->int_enable) |	/* interrupt enable */ \
	 SET_DMA_ETD(p_init->etd_output) |		/* end of transfer pin */ \
	 SET_DMA_TCE(p_init->tce_enable) |		/* terminal count enable */ \
	 SET_DMA_PL(p_init->pl) |			/* peripheral location */ \
	 SET_DMA_DAI(p_init->dai) |			/* dest addr increment */ \
	 SET_DMA_SAI(p_init->sai) |			/* src addr increment */ \
	 SET_DMA_PRIORITY(p_init->cp) |			/* channel priority */ \
	 SET_DMA_PW(p_init->pwidth) |			/* peripheral/bus width */ \
	 SET_DMA_PSC(p_init->psc) |			/* peripheral setup cycles */ \
	 SET_DMA_PWC(p_init->pwc) |			/* peripheral wait cycles */ \
	 SET_DMA_PHC(p_init->phc) |			/* peripheral hold cycles */ \
	 SET_DMA_TCD(p_init->tcd_disable) |		/* TC chain mode disable */ \
	 SET_DMA_ECE(p_init->ece_enable) |		/* ECE chain mode enable */ \
	 SET_DMA_CH(p_init->ch_enable) |		/* Chain enable */ \
	 DMA_CR_MB0 /* must be one */)

#define GET_DMA_POLARITY(chan)	chan

#endif

typedef struct {
	unsigned short in_use;	/* set when channel is being used, clr when
				 * available.
				 */
	/*
	 * Valid polarity settings:
	 *   DMAReq_ActiveLow(n)
	 *   DMAAck_ActiveLow(n)
	 *   EOT_ActiveLow(n)
	 *
	 *   n is 0 to max dma chans
	 */
	unsigned int polarity;

	char buffer_enable;	/* Boolean: buffer enable */
	char tce_enable;	/* Boolean: terminal count enable */
	char etd_output;	/* Boolean: eot pin is a tc output */
	char pce;		/* Boolean: parity check enable */

	/*
	 * Peripheral location:
	 * INTERNAL_PERIPHERAL (UART0 on the 405GP)
	 * EXTERNAL_PERIPHERAL
	 */
	char pl;		/* internal/external peripheral */

	/*
	 * Valid pwidth settings:
	 *   PW_8
	 *   PW_16
	 *   PW_32
	 *   PW_64
	 */
	unsigned int pwidth;

	char dai;		/* Boolean: dst address increment */
	char sai;		/* Boolean: src address increment */

	/*
	 * Valid psc settings: 0-3
	 */
	unsigned int psc;	/* Peripheral Setup Cycles */

	/*
	 * Valid pwc settings:
	 * 0-63
	 */
	unsigned int pwc;	/* Peripheral Wait Cycles */

	/*
	 * Valid phc settings:
	 * 0-7
	 */
	unsigned int phc;	/* Peripheral Hold Cycles */

	/*
	 * Valid cp (channel priority) settings:
	 *   PRIORITY_LOW
	 *   PRIORITY_MID_LOW
	 *   PRIORITY_MID_HIGH
	 *   PRIORITY_HIGH
	 */
	unsigned int cp;	/* channel priority */

	/*
	 * Valid pf (memory read prefetch) settings:
	 *   PREFETCH_1
	 *   PREFETCH_2
	 *   PREFETCH_4
	 */
	unsigned int pf;	/* memory read prefetch */

	/*
	 * Boolean: channel interrupt enable
	 * NOTE: for sgl transfers, only the last descriptor will be set up
	 * to interrupt.
	 */
	char int_enable;

	char shift;		/* easy access to byte_count shift, based on */
				/* the width of the channel */

	uint32_t control;	/* channel control word */

	/* These variables are used ONLY in single dma transfers */
	unsigned int mode;	/* transfer mode */
	phys_addr_t addr;
	char ce;		/* channel enable */
#ifdef CONFIG_STB03xxx
	char ch_enable;
	char tcd_disable;
	char ece_enable;
	char td;		/* transfer direction */
#endif

	char int_on_final_sg;	/* for scatter/gather - only interrupt on last sg */
} ppc_dma_ch_t;

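/*
 * Illustrative sketch only (assumed usage pieced together from the fields
 * above and the prototypes at the end of this header; 'dmanr' is a caller
 * supplied channel number):
 *
 *	ppc_dma_ch_t p_init;
 *
 *	memset(&p_init, 0, sizeof(p_init));
 *	p_init.pwidth     = PW_32;
 *	p_init.dai        = 1;
 *	p_init.sai        = 1;
 *	p_init.int_enable = 0;
 *	if (ppc4xx_init_dma_channel(dmanr, &p_init) != DMA_STATUS_GOOD)
 *		...handle the error...
 */
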
/*
 * PPC44x DMA implementations have a slightly different
 * descriptor layout.  Probably moved about due to the
 * change to 64-bit addresses and link pointer.  I don't
 * know why they didn't just leave control_count after
 * the dst_addr.
 */
#ifdef PPC4xx_DMA_64BIT
typedef struct {
	uint32_t control;
	uint32_t control_count;
	phys_addr_t src_addr;
	phys_addr_t dst_addr;
	phys_addr_t next;
} ppc_sgl_t;
#else
typedef struct {
	uint32_t control;
	phys_addr_t src_addr;
	phys_addr_t dst_addr;
	uint32_t control_count;
	uint32_t next;
} ppc_sgl_t;
#endif
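
/*
 * Illustrative note (an assumption based on the SG_* bit definitions earlier
 * in this file): the 'control' word of a descriptor presumably carries the
 * SG_LINK / interrupt-enable bits, consistent with the ppc_dma_ch_t comment
 * that only the final descriptor of a list is set up to interrupt, e.g.
 *
 *	last_desc->control |= SG_TCI_ENABLE;	(last_desc is hypothetical)
 */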

typedef struct {
	unsigned int dmanr;
	uint32_t control;	/* channel ctrl word; loaded from each descriptor */
	uint32_t sgl_control;	/* LK, TCI, ETI, and ERI bits in sgl descriptor */
	dma_addr_t dma_addr;	/* dma (physical) address of this list */
	ppc_sgl_t *phead;
	dma_addr_t phead_dma;
	ppc_sgl_t *ptail;
	dma_addr_t ptail_dma;
} sgl_list_info_t;

typedef struct {
	phys_addr_t *src_addr;
	phys_addr_t *dst_addr;
	phys_addr_t dma_src_addr;
	phys_addr_t dma_dst_addr;
} pci_alloc_desc_t;

extern ppc_dma_ch_t dma_channels[];

/*
 * The DMA API is implemented in ppc4xx_dma.c and ppc4xx_sgdma.c
 */
extern int ppc4xx_init_dma_channel(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_get_channel_config(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_set_channel_priority(unsigned int, unsigned int);
extern unsigned int ppc4xx_get_peripheral_width(unsigned int);
extern void ppc4xx_set_sg_addr(int, phys_addr_t);
extern int ppc4xx_add_dma_sgl(sgl_handle_t, phys_addr_t, phys_addr_t, unsigned int);
extern void ppc4xx_enable_dma_sgl(sgl_handle_t);
extern void ppc4xx_disable_dma_sgl(sgl_handle_t);
extern int ppc4xx_get_dma_sgl_residue(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_delete_dma_sgl_element(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_alloc_dma_handle(sgl_handle_t *, unsigned int, unsigned int);
extern void ppc4xx_free_dma_handle(sgl_handle_t);
extern int ppc4xx_get_dma_status(void);
extern int ppc4xx_enable_burst(unsigned int);
extern int ppc4xx_disable_burst(unsigned int);
extern int ppc4xx_set_burst_size(unsigned int, unsigned int);
extern void ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr);
extern void ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr);
extern void ppc4xx_enable_dma(unsigned int dmanr);
extern void ppc4xx_disable_dma(unsigned int dmanr);
extern void ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count);
extern int ppc4xx_get_dma_residue(unsigned int dmanr);
extern void ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
		phys_addr_t dst_dma_addr);
extern int ppc4xx_enable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_disable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_clr_dma_status(unsigned int dmanr);
extern int ppc4xx_map_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_disable_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode);
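
/*
 * Illustrative scatter/gather sketch (assumed call sequence based only on
 * the prototypes above; the meaning of ppc4xx_alloc_dma_handle()'s middle
 * argument and the units of the last ppc4xx_add_dma_sgl() argument are
 * assumptions -- check ppc4xx_sgdma.c):
 *
 *	sgl_handle_t handle;
 *
 *	if (ppc4xx_alloc_dma_handle(&handle, DMA_MODE_MM, dmanr) != DMA_STATUS_GOOD)
 *		...bail out...
 *	ppc4xx_add_dma_sgl(handle, src_phys, dst_phys, count);
 *	ppc4xx_enable_dma_sgl(handle);
 *	...wait for the transfer to complete...
 *	ppc4xx_free_dma_handle(handle);
 */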

/* These are in kernel/dma.c: */

/* reserve a DMA channel */
extern int request_dma(unsigned int dmanr, const char *device_id);
/* release it again */
extern void free_dma(unsigned int dmanr);
#endif
#endif /* __KERNEL__ */