/*
 * include/asm-ppc/ppc4xx_dma.h
 *
 * IBM PPC4xx DMA engine library
 *
 * Copyright 2000-2004 MontaVista Software Inc.
 *
 * Cleaned up a bit more, Matt Porter <mporter@kernel.crashing.org>
 *
 * Original code by Armin Kuster <akuster@mvista.com>
 * and Pete Popov <ppopov@mvista.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#ifdef __KERNEL__
#ifndef __ASMPPC_PPC4xx_DMA_H
#define __ASMPPC_PPC4xx_DMA_H

#include <linux/config.h>
#include <linux/types.h>
#include <asm/mmu.h>
#include <asm/ibm4xx.h>

#undef DEBUG_4xxDMA

#define MAX_PPC4xx_DMA_CHANNELS 4

/*
 * Function return status codes
 * These values are used to indicate whether or not the function
 * call was successful, or a bad/invalid parameter was passed.
 */
#define DMA_STATUS_GOOD 0
#define DMA_STATUS_BAD_CHANNEL 1
#define DMA_STATUS_BAD_HANDLE 2
#define DMA_STATUS_BAD_MODE 3
#define DMA_STATUS_NULL_POINTER 4
#define DMA_STATUS_OUT_OF_MEMORY 5
#define DMA_STATUS_SGL_LIST_EMPTY 6
#define DMA_STATUS_GENERAL_ERROR 7
#define DMA_STATUS_CHANNEL_NOTFREE 8

#define DMA_CHANNEL_BUSY 0x80000000

/*
 * These indicate status as returned from the DMA Status Register.
 */
#define DMA_STATUS_NO_ERROR 0
#define DMA_STATUS_CS 1 /* Count Status */
#define DMA_STATUS_TS 2 /* Transfer Status */
#define DMA_STATUS_DMA_ERROR 3 /* DMA Error Occurred */
#define DMA_STATUS_DMA_BUSY 4 /* The channel is busy */


/*
 * DMA Channel Control Registers
 */

#ifdef CONFIG_44x
#define PPC4xx_DMA_64BIT
#define DMA_CR_OFFSET 1
#else
#define DMA_CR_OFFSET 0
#endif

#define DMA_CE_ENABLE (1<<31) /* DMA Channel Enable */
#define SET_DMA_CE_ENABLE(x) (((x)&0x1)<<31)
#define GET_DMA_CE_ENABLE(x) (((x)&DMA_CE_ENABLE)>>31)

#define DMA_CIE_ENABLE (1<<30) /* DMA Channel Interrupt Enable */
#define SET_DMA_CIE_ENABLE(x) (((x)&0x1)<<30)
#define GET_DMA_CIE_ENABLE(x) (((x)&DMA_CIE_ENABLE)>>30)

#define DMA_TD (1<<29)
#define SET_DMA_TD(x) (((x)&0x1)<<29)
#define GET_DMA_TD(x) (((x)&DMA_TD)>>29)

#define DMA_PL (1<<28) /* Peripheral Location */
#define SET_DMA_PL(x) (((x)&0x1)<<28)
#define GET_DMA_PL(x) (((x)&DMA_PL)>>28)

#define EXTERNAL_PERIPHERAL 0
#define INTERNAL_PERIPHERAL 1

#define SET_DMA_PW(x) (((x)&0x3)<<(26-DMA_CR_OFFSET)) /* Peripheral Width */
#define DMA_PW_MASK SET_DMA_PW(3)
#define PW_8 0
#define PW_16 1
#define PW_32 2
#define PW_64 3
/* FIXME: Add PW_128 support for 440GP DMA block */
#define GET_DMA_PW(x) (((x)&DMA_PW_MASK)>>(26-DMA_CR_OFFSET))

#define DMA_DAI (1<<(25-DMA_CR_OFFSET)) /* Destination Address Increment */
#define SET_DMA_DAI(x) (((x)&0x1)<<(25-DMA_CR_OFFSET))

#define DMA_SAI (1<<(24-DMA_CR_OFFSET)) /* Source Address Increment */
#define SET_DMA_SAI(x) (((x)&0x1)<<(24-DMA_CR_OFFSET))

#define DMA_BEN (1<<(23-DMA_CR_OFFSET)) /* Buffer Enable */
#define SET_DMA_BEN(x) (((x)&0x1)<<(23-DMA_CR_OFFSET))

#define SET_DMA_TM(x) (((x)&0x3)<<(21-DMA_CR_OFFSET)) /* Transfer Mode */
#define DMA_TM_MASK SET_DMA_TM(3)
#define TM_PERIPHERAL 0 /* Peripheral */
#define TM_RESERVED 1 /* Reserved */
#define TM_S_MM 2 /* Memory to Memory */
#define TM_D_MM 3 /* Device Paced Memory to Memory */
#define GET_DMA_TM(x) (((x)&DMA_TM_MASK)>>(21-DMA_CR_OFFSET))

#define SET_DMA_PSC(x) (((x)&0x3)<<(19-DMA_CR_OFFSET)) /* Peripheral Setup Cycles */
#define DMA_PSC_MASK SET_DMA_PSC(3)
#define GET_DMA_PSC(x) (((x)&DMA_PSC_MASK)>>(19-DMA_CR_OFFSET))

#define SET_DMA_PWC(x) (((x)&0x3F)<<(13-DMA_CR_OFFSET)) /* Peripheral Wait Cycles */
#define DMA_PWC_MASK SET_DMA_PWC(0x3F)
#define GET_DMA_PWC(x) (((x)&DMA_PWC_MASK)>>(13-DMA_CR_OFFSET))

#define SET_DMA_PHC(x) (((x)&0x7)<<(10-DMA_CR_OFFSET)) /* Peripheral Hold Cycles */
#define DMA_PHC_MASK SET_DMA_PHC(0x7)
#define GET_DMA_PHC(x) (((x)&DMA_PHC_MASK)>>(10-DMA_CR_OFFSET))

#define DMA_ETD_OUTPUT (1<<(9-DMA_CR_OFFSET)) /* EOT pin is a TC output */
#define SET_DMA_ETD(x) (((x)&0x1)<<(9-DMA_CR_OFFSET))

#define DMA_TCE_ENABLE (1<<(8-DMA_CR_OFFSET))
#define SET_DMA_TCE(x) (((x)&0x1)<<(8-DMA_CR_OFFSET))

#define DMA_DEC (1<<(2)) /* Address Decrement */
#define SET_DMA_DEC(x) (((x)&0x1)<<2)
#define GET_DMA_DEC(x) (((x)&DMA_DEC)>>2)


/*
 * Transfer Modes
 * These modes are defined in a way that makes it possible to
 * simply "or" in the value in the control register.
 */

#define DMA_MODE_MM (SET_DMA_TM(TM_S_MM)) /* memory to memory */

/* Device-paced memory to memory, */
/* device is at source address */
#define DMA_MODE_MM_DEVATSRC (DMA_TD | SET_DMA_TM(TM_D_MM))

/* Device-paced memory to memory, */
/* device is at destination address */
#define DMA_MODE_MM_DEVATDST (SET_DMA_TM(TM_D_MM))
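
/*
 * Usage sketch (not part of the original header): since the DMA_MODE_*
 * values above are plain control register bit patterns, a driver can OR
 * one into a control word directly or hand it to ppc4xx_set_dma_mode(),
 * declared near the end of this file.  Channel 0 below is an arbitrary
 * example; checking the return value against DMA_STATUS_GOOD follows
 * the status-code convention defined above:
 *
 *	int rc = ppc4xx_set_dma_mode(0, DMA_MODE_MM);
 *	if (rc != DMA_STATUS_GOOD)
 *		return rc;
 */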

/* 405gp/440gp */
#define SET_DMA_PREFETCH(x) (((x)&0x3)<<(4-DMA_CR_OFFSET)) /* Memory Read Prefetch */
#define DMA_PREFETCH_MASK SET_DMA_PREFETCH(3)
#define PREFETCH_1 0 /* Prefetch 1 Double Word */
#define PREFETCH_2 1
#define PREFETCH_4 2
#define GET_DMA_PREFETCH(x) (((x)&DMA_PREFETCH_MASK)>>(4-DMA_CR_OFFSET))

#define DMA_PCE (1<<(3-DMA_CR_OFFSET)) /* Parity Check Enable */
#define SET_DMA_PCE(x) (((x)&0x1)<<(3-DMA_CR_OFFSET))
#define GET_DMA_PCE(x) (((x)&DMA_PCE)>>(3-DMA_CR_OFFSET))

/* stb3x */

#define DMA_ECE_ENABLE (1<<5)
#define SET_DMA_ECE(x) (((x)&0x1)<<5)
#define GET_DMA_ECE(x) (((x)&DMA_ECE_ENABLE)>>5)

#define DMA_TCD_DISABLE (1<<4)
#define SET_DMA_TCD(x) (((x)&0x1)<<4)
#define GET_DMA_TCD(x) (((x)&DMA_TCD_DISABLE)>>4)

typedef uint32_t sgl_handle_t;

#ifdef CONFIG_PPC4xx_EDMA

#define SGL_LIST_SIZE 4096
#define DMA_PPC4xx_SIZE SGL_LIST_SIZE

#define SET_DMA_PRIORITY(x) (((x)&0x3)<<(6-DMA_CR_OFFSET)) /* DMA Channel Priority */
#define DMA_PRIORITY_MASK SET_DMA_PRIORITY(3)
#define PRIORITY_LOW 0
#define PRIORITY_MID_LOW 1
#define PRIORITY_MID_HIGH 2
#define PRIORITY_HIGH 3
#define GET_DMA_PRIORITY(x) (((x)&DMA_PRIORITY_MASK)>>(6-DMA_CR_OFFSET))

/*
 * DMA Polarity Configuration Register
 */
#define DMAReq_ActiveLow(chan) (1<<(31-(chan*3)))
#define DMAAck_ActiveLow(chan) (1<<(30-(chan*3)))
#define EOT_ActiveLow(chan) (1<<(29-(chan*3))) /* End of Transfer */

/*
 * DMA Sleep Mode Register
 */
#define SLEEP_MODE_ENABLE (1<<21)

/*
 * DMA Status Register
 */
#define DMA_CS0 (1<<31) /* Terminal Count has been reached */
#define DMA_CS1 (1<<30)
#define DMA_CS2 (1<<29)
#define DMA_CS3 (1<<28)

#define DMA_TS0 (1<<27) /* End of Transfer has been requested */
#define DMA_TS1 (1<<26)
#define DMA_TS2 (1<<25)
#define DMA_TS3 (1<<24)

#define DMA_CH0_ERR (1<<23) /* DMA Channel 0 Error */
#define DMA_CH1_ERR (1<<22)
#define DMA_CH2_ERR (1<<21)
#define DMA_CH3_ERR (1<<20)

#define DMA_IN_DMA_REQ0 (1<<19) /* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1 (1<<18)
#define DMA_IN_DMA_REQ2 (1<<17)
#define DMA_IN_DMA_REQ3 (1<<16)

#define DMA_EXT_DMA_REQ0 (1<<15) /* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1 (1<<14)
#define DMA_EXT_DMA_REQ2 (1<<13)
#define DMA_EXT_DMA_REQ3 (1<<12)

#define DMA_CH0_BUSY (1<<11) /* DMA Channel 0 Busy */
#define DMA_CH1_BUSY (1<<10)
#define DMA_CH2_BUSY (1<<9)
#define DMA_CH3_BUSY (1<<8)

#define DMA_SG0 (1<<7) /* DMA Channel 0 Scatter/Gather in progress */
#define DMA_SG1 (1<<6)
#define DMA_SG2 (1<<5)
#define DMA_SG3 (1<<4)

/* DMA Channel Count Register */
#define DMA_CTC_BTEN (1<<23) /* Burst Enable/Disable bit */
#define DMA_CTC_BSIZ_MSK (3<<21) /* Mask of the Burst size bits */
#define DMA_CTC_BSIZ_2 (0)
#define DMA_CTC_BSIZ_4 (1<<21)
#define DMA_CTC_BSIZ_8 (2<<21)
#define DMA_CTC_BSIZ_16 (3<<21)

/*
 * DMA SG Command Register
 */
#define SSG_ENABLE(chan) (1<<(31-chan)) /* Start Scatter Gather */
#define SSG_MASK_ENABLE(chan) (1<<(15-chan)) /* Enable writing to SSG0 bit */

/*
 * DMA Scatter/Gather Descriptor Bit fields
 */
#define SG_LINK (1<<31) /* Link */
#define SG_TCI_ENABLE (1<<29) /* Enable Terminal Count Interrupt */
#define SG_ETI_ENABLE (1<<28) /* Enable End of Transfer Interrupt */
#define SG_ERI_ENABLE (1<<27) /* Enable Error Interrupt */
#define SG_COUNT_MASK 0xFFFF /* Count Field */

#define SET_DMA_CONTROL \
    (SET_DMA_CIE_ENABLE(p_init->int_enable) | /* interrupt enable */ \
    SET_DMA_BEN(p_init->buffer_enable) | /* buffer enable */ \
    SET_DMA_ETD(p_init->etd_output) | /* end of transfer pin */ \
    SET_DMA_TCE(p_init->tce_enable) | /* terminal count enable */ \
    SET_DMA_PL(p_init->pl) | /* peripheral location */ \
    SET_DMA_DAI(p_init->dai) | /* dest addr increment */ \
    SET_DMA_SAI(p_init->sai) | /* src addr increment */ \
    SET_DMA_PRIORITY(p_init->cp) | /* channel priority */ \
    SET_DMA_PW(p_init->pwidth) | /* peripheral/bus width */ \
    SET_DMA_PSC(p_init->psc) | /* peripheral setup cycles */ \
    SET_DMA_PWC(p_init->pwc) | /* peripheral wait cycles */ \
    SET_DMA_PHC(p_init->phc) | /* peripheral hold cycles */ \
    SET_DMA_PREFETCH(p_init->pf) /* read prefetch */)

#define GET_DMA_POLARITY(chan) (DMAReq_ActiveLow(chan) | DMAAck_ActiveLow(chan) | EOT_ActiveLow(chan))
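
/*
 * Note on SET_DMA_CONTROL (added sketch, not in the original header):
 * the macro is not function-like; it expands in place and expects a
 * pointer named p_init to a ppc_dma_ch_t (defined further down in this
 * file) to be in scope wherever it is used.  A rough illustration, with
 * arbitrary example field values:
 *
 *	ppc_dma_ch_t cfg = { 0 };
 *	ppc_dma_ch_t *p_init = &cfg;
 *	uint32_t control;
 *
 *	p_init->int_enable = 1;
 *	p_init->pl = INTERNAL_PERIPHERAL;
 *	p_init->pwidth = PW_32;
 *	p_init->dai = 1;
 *	p_init->sai = 1;
 *	control = SET_DMA_CONTROL;
 *
 * GET_DMA_POLARITY(chan) simply bundles the three active-low polarity
 * bits for one channel of the polarity configuration register.
 */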

#elif defined(CONFIG_STB03xxx) /* stb03xxx */

#define DMA_PPC4xx_SIZE 4096

/*
 * DMA Status Register
 */

#define SET_DMA_PRIORITY(x) (((x)&0x00800001)) /* DMA Channel Priority */
#define DMA_PRIORITY_MASK 0x00800001
#define PRIORITY_LOW 0x00000000
#define PRIORITY_MID_LOW 0x00000001
#define PRIORITY_MID_HIGH 0x00800000
#define PRIORITY_HIGH 0x00800001
#define GET_DMA_PRIORITY(x) (((((x)&DMA_PRIORITY_MASK) &0x00800000) >> 22 ) | (((x)&DMA_PRIORITY_MASK) &0x00000001))

#define DMA_CS0 (1<<31) /* Terminal Count has been reached */
#define DMA_CS1 (1<<30)
#define DMA_CS2 (1<<29)
#define DMA_CS3 (1<<28)

#define DMA_TS0 (1<<27) /* End of Transfer has been requested */
#define DMA_TS1 (1<<26)
#define DMA_TS2 (1<<25)
#define DMA_TS3 (1<<24)

#define DMA_CH0_ERR (1<<23) /* DMA Channel 0 Error */
#define DMA_CH1_ERR (1<<22)
#define DMA_CH2_ERR (1<<21)
#define DMA_CH3_ERR (1<<20)

#define DMA_CT0 (1<<19) /* Chained transfer */

#define DMA_IN_DMA_REQ0 (1<<18) /* Internal DMA Request is pending */
#define DMA_IN_DMA_REQ1 (1<<17)
#define DMA_IN_DMA_REQ2 (1<<16)
#define DMA_IN_DMA_REQ3 (1<<15)

#define DMA_EXT_DMA_REQ0 (1<<14) /* External DMA Request is pending */
#define DMA_EXT_DMA_REQ1 (1<<13)
#define DMA_EXT_DMA_REQ2 (1<<12)
#define DMA_EXT_DMA_REQ3 (1<<11)

#define DMA_CH0_BUSY (1<<10) /* DMA Channel 0 Busy */
#define DMA_CH1_BUSY (1<<9)
#define DMA_CH2_BUSY (1<<8)
#define DMA_CH3_BUSY (1<<7)

#define DMA_CT1 (1<<6) /* Chained transfer */
#define DMA_CT2 (1<<5)
#define DMA_CT3 (1<<4)

#define DMA_CH_ENABLE (1<<7)
#define SET_DMA_CH(x) (((x)&0x1)<<7)
#define GET_DMA_CH(x) (((x)&DMA_CH_ENABLE)>>7)

/* STBx25xx DMA unique */
/*
 * Peripheral request numbers used to enable a device port on a DMA
 * channel, e.g. external request 0 on DMA channel 1 (see the sketch
 * after these definitions).
 */

#define SSP0_RECV 15
#define SSP0_XMIT 14
#define EXT_DMA_0 12
#define SC1_XMIT 11
#define SC1_RECV 10
#define EXT_DMA_2 9
#define EXT_DMA_3 8
#define SERIAL2_XMIT 7
#define SERIAL2_RECV 6
#define SC0_XMIT 5
#define SC0_RECV 4
#define SERIAL1_XMIT 3
#define SERIAL1_RECV 2
#define SERIAL0_XMIT 1
#define SERIAL0_RECV 0

#define DMA_CHAN_0 1
#define DMA_CHAN_1 2
#define DMA_CHAN_2 3
#define DMA_CHAN_3 4

/* end STBx25xx */
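
/*
 * Illustrative sketch only (not in the original header): the request
 * numbers and DMA_CHAN_* values above are meant to be fed to
 * ppc4xx_map_dma_port(), declared near the end of this file.  The exact
 * meaning of its arguments is not documented here, so the call below is
 * only an assumed reading of "external request 0 on DMA channel 1";
 * check ppc4xx_dma.c before relying on it:
 *
 *	ppc4xx_map_dma_port(1, EXT_DMA_0, DMA_CHAN_1);
 */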

/*
 * Bit 30 must be one for Redwoods, otherwise transfers may receive errors.
 */
#define DMA_CR_MB0 0x2

#define SET_DMA_CONTROL \
    (SET_DMA_CIE_ENABLE(p_init->int_enable) | /* interrupt enable */ \
    SET_DMA_ETD(p_init->etd_output) | /* end of transfer pin */ \
    SET_DMA_TCE(p_init->tce_enable) | /* terminal count enable */ \
    SET_DMA_PL(p_init->pl) | /* peripheral location */ \
    SET_DMA_DAI(p_init->dai) | /* dest addr increment */ \
    SET_DMA_SAI(p_init->sai) | /* src addr increment */ \
    SET_DMA_PRIORITY(p_init->cp) | /* channel priority */ \
    SET_DMA_PW(p_init->pwidth) | /* peripheral/bus width */ \
    SET_DMA_PSC(p_init->psc) | /* peripheral setup cycles */ \
    SET_DMA_PWC(p_init->pwc) | /* peripheral wait cycles */ \
    SET_DMA_PHC(p_init->phc) | /* peripheral hold cycles */ \
    SET_DMA_TCD(p_init->tcd_disable) | /* TC chain mode disable */ \
    SET_DMA_ECE(p_init->ece_enable) | /* ECE chain mode enable */ \
    SET_DMA_CH(p_init->ch_enable) | /* Chain enable */ \
    DMA_CR_MB0 /* must be one */)

#define GET_DMA_POLARITY(chan) chan

#endif

typedef struct {
    unsigned short in_use;    /* set when channel is being used, clr when
                               * available.
                               */
    /*
     * Valid polarity settings:
     * DMAReq_ActiveLow(n)
     * DMAAck_ActiveLow(n)
     * EOT_ActiveLow(n)
     *
     * n is 0 to max dma chans
     */
    unsigned int polarity;

    char buffer_enable; /* Boolean: buffer enable */
    char tce_enable; /* Boolean: terminal count enable */
    char etd_output; /* Boolean: eot pin is a tc output */
    char pce; /* Boolean: parity check enable */

    /*
     * Peripheral location:
     * INTERNAL_PERIPHERAL (UART0 on the 405GP)
     * EXTERNAL_PERIPHERAL
     */
    char pl; /* internal/external peripheral */

    /*
     * Valid pwidth settings:
     * PW_8
     * PW_16
     * PW_32
     * PW_64
     */
    unsigned int pwidth;

    char dai; /* Boolean: dst address increment */
    char sai; /* Boolean: src address increment */

    /*
     * Valid psc settings: 0-3
     */
    unsigned int psc; /* Peripheral Setup Cycles */

    /*
     * Valid pwc settings:
     * 0-63
     */
    unsigned int pwc; /* Peripheral Wait Cycles */

    /*
     * Valid phc settings:
     * 0-7
     */
    unsigned int phc; /* Peripheral Hold Cycles */

    /*
     * Valid cp (channel priority) settings:
     * PRIORITY_LOW
     * PRIORITY_MID_LOW
     * PRIORITY_MID_HIGH
     * PRIORITY_HIGH
     */
    unsigned int cp; /* channel priority */

    /*
     * Valid pf (memory read prefetch) settings:
     *
     * PREFETCH_1
     * PREFETCH_2
     * PREFETCH_4
     */
    unsigned int pf; /* memory read prefetch */

    /*
     * Boolean: channel interrupt enable
     * NOTE: for sgl transfers, only the last descriptor will be setup to
     * interrupt.
     */
    char int_enable;

    char shift; /* easy access to byte_count shift, based on */
                /* the width of the channel */

    uint32_t control; /* channel control word */

    /* These variables are used ONLY in single dma transfers */
    unsigned int mode; /* transfer mode */
    phys_addr_t addr;
    char ce; /* channel enable */
#ifdef CONFIG_STB03xxx
    char ch_enable;
    char tcd_disable;
    char ece_enable;
    char td; /* transfer direction */
#endif

    char int_on_final_sg; /* for scatter/gather - only interrupt on last sg */
} ppc_dma_ch_t;
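
/*
 * Illustrative sketch (not part of the original header): a driver
 * normally fills in a ppc_dma_ch_t and passes it to
 * ppc4xx_init_dma_channel(), declared below.  The field values and the
 * channel number 0 here are arbitrary examples drawn from the valid
 * settings listed above:
 *
 *	ppc_dma_ch_t ch = { 0 };
 *	int rc;
 *
 *	ch.pwidth = PW_32;
 *	ch.dai = 1;
 *	ch.sai = 1;
 *	ch.cp = PRIORITY_LOW;
 *	ch.int_enable = 0;
 *
 *	rc = ppc4xx_init_dma_channel(0, &ch);
 *	if (rc != DMA_STATUS_GOOD)
 *		return rc;
 */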

/*
 * PPC44x DMA implementations have a slightly different
 * descriptor layout. Probably moved about due to the
 * change to 64-bit addresses and link pointer. I don't
 * know why they didn't just leave control_count after
 * the dst_addr.
 */
#ifdef PPC4xx_DMA_64BIT
typedef struct {
    uint32_t control;
    uint32_t control_count;
    phys_addr_t src_addr;
    phys_addr_t dst_addr;
    phys_addr_t next;
} ppc_sgl_t;
#else
typedef struct {
    uint32_t control;
    phys_addr_t src_addr;
    phys_addr_t dst_addr;
    uint32_t control_count;
    uint32_t next;
} ppc_sgl_t;
#endif

typedef struct {
    unsigned int dmanr;
    uint32_t control; /* channel ctrl word; loaded from each descriptor */
    uint32_t sgl_control; /* LK, TCI, ETI, and ERI bits in sgl descriptor */
    dma_addr_t dma_addr; /* dma (physical) address of this list */
    ppc_sgl_t *phead;
    dma_addr_t phead_dma;
    ppc_sgl_t *ptail;
    dma_addr_t ptail_dma;
} sgl_list_info_t;

typedef struct {
    phys_addr_t *src_addr;
    phys_addr_t *dst_addr;
    phys_addr_t dma_src_addr;
    phys_addr_t dma_dst_addr;
} pci_alloc_desc_t;

extern ppc_dma_ch_t dma_channels[];

/*
 * The DMA API is implemented in ppc4xx_dma.c and ppc4xx_sgdma.c.
 */
extern int ppc4xx_init_dma_channel(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_get_channel_config(unsigned int, ppc_dma_ch_t *);
extern int ppc4xx_set_channel_priority(unsigned int, unsigned int);
extern unsigned int ppc4xx_get_peripheral_width(unsigned int);
extern void ppc4xx_set_sg_addr(int, phys_addr_t);
extern int ppc4xx_add_dma_sgl(sgl_handle_t, phys_addr_t, phys_addr_t, unsigned int);
extern void ppc4xx_enable_dma_sgl(sgl_handle_t);
extern void ppc4xx_disable_dma_sgl(sgl_handle_t);
extern int ppc4xx_get_dma_sgl_residue(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_delete_dma_sgl_element(sgl_handle_t, phys_addr_t *, phys_addr_t *);
extern int ppc4xx_alloc_dma_handle(sgl_handle_t *, unsigned int, unsigned int);
extern void ppc4xx_free_dma_handle(sgl_handle_t);
extern int ppc4xx_get_dma_status(void);
extern int ppc4xx_enable_burst(unsigned int);
extern int ppc4xx_disable_burst(unsigned int);
extern int ppc4xx_set_burst_size(unsigned int, unsigned int);
extern void ppc4xx_set_src_addr(int dmanr, phys_addr_t src_addr);
extern void ppc4xx_set_dst_addr(int dmanr, phys_addr_t dst_addr);
extern void ppc4xx_enable_dma(unsigned int dmanr);
extern void ppc4xx_disable_dma(unsigned int dmanr);
extern void ppc4xx_set_dma_count(unsigned int dmanr, unsigned int count);
extern int ppc4xx_get_dma_residue(unsigned int dmanr);
extern void ppc4xx_set_dma_addr2(unsigned int dmanr, phys_addr_t src_dma_addr,
        phys_addr_t dst_dma_addr);
extern int ppc4xx_enable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_disable_dma_interrupt(unsigned int dmanr);
extern int ppc4xx_clr_dma_status(unsigned int dmanr);
extern int ppc4xx_map_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_disable_dma_port(unsigned int dmanr, unsigned int ocp_dma, short dma_chan);
extern int ppc4xx_set_dma_mode(unsigned int dmanr, unsigned int mode);
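
/*
 * Illustrative sketch (not part of the original header) of a single
 * memory-to-memory transfer built from the calls above.  The call
 * ordering and the units of the count argument are assumptions based on
 * the declarations and comments in this file rather than a verified
 * recipe; dmanr 0 is an arbitrary channel, and src_phys, dst_phys and
 * count stand for caller-provided values (the addresses must be
 * physical/bus addresses):
 *
 *	ppc_dma_ch_t ch = { 0 };
 *	unsigned int dmanr = 0;
 *	int rc;
 *
 *	ch.pwidth = PW_32;
 *	ch.dai = 1;
 *	ch.sai = 1;
 *	rc = ppc4xx_init_dma_channel(dmanr, &ch);
 *	if (rc != DMA_STATUS_GOOD)
 *		return rc;
 *	ppc4xx_set_dma_mode(dmanr, DMA_MODE_MM);
 *	ppc4xx_set_dma_addr2(dmanr, src_phys, dst_phys);
 *	ppc4xx_set_dma_count(dmanr, count);
 *	ppc4xx_enable_dma(dmanr);
 *
 * Scatter/gather transfers instead go (broadly) through
 * ppc4xx_alloc_dma_handle(), ppc4xx_add_dma_sgl() once per segment,
 * ppc4xx_enable_dma_sgl() and, when done, ppc4xx_free_dma_handle().
 */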

/* These are in kernel/dma.c: */

/* reserve a DMA channel */
extern int request_dma(unsigned int dmanr, const char *device_id);
/* release it again */
extern void free_dma(unsigned int dmanr);
#endif
#endif /* __KERNEL__ */