#ifndef _ASM_DMA_MAPPING_H
#define _ASM_DMA_MAPPING_H

#include <asm/scatterlist.h>
#include <asm/cache.h>

void *dma_alloc_noncoherent(struct device *dev, size_t size,
			   dma_addr_t *dma_handle, gfp_t flag);

void dma_free_noncoherent(struct device *dev, size_t size,
			 void *vaddr, dma_addr_t dma_handle);

void *dma_alloc_coherent(struct device *dev, size_t size,
			   dma_addr_t *dma_handle, gfp_t flag);

void dma_free_coherent(struct device *dev, size_t size,
		       void *vaddr, dma_addr_t dma_handle);

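/*
 * Illustrative sketch only, not part of the exported interface: a driver
 * would typically obtain and release a consistent (coherent) buffer roughly
 * as below. The names "mydev" and MYDEV_BUF_SIZE are hypothetical
 * placeholders used only for the example.
 *
 *	dma_addr_t handle;
 *	void *cpu_addr;
 *
 *	cpu_addr = dma_alloc_coherent(&mydev->dev, MYDEV_BUF_SIZE,
 *				      &handle, GFP_KERNEL);
 *	if (!cpu_addr)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(&mydev->dev, MYDEV_BUF_SIZE, cpu_addr, handle);
 */
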
extern dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size,
	enum dma_data_direction direction);
extern void dma_unmap_single(struct device *dev, dma_addr_t dma_addr,
	size_t size, enum dma_data_direction direction);
extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
	enum dma_data_direction direction);
extern dma_addr_t dma_map_page(struct device *dev, struct page *page,
	unsigned long offset, size_t size, enum dma_data_direction direction);

static inline void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
	size_t size, enum dma_data_direction direction)
{
	dma_unmap_single(dev, dma_address, size, direction);
}

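/*
 * Illustrative sketch only, not part of the exported interface: a streaming
 * mapping pairs dma_map_single() with dma_unmap_single() around a single
 * transfer, and the returned handle should be checked with
 * dma_mapping_error() (declared below). "buf" and "len" are hypothetical.
 *
 *	dma_addr_t mapping;
 *
 *	mapping = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(dev, mapping))
 *		return -ENOMEM;
 *	... start the device transfer ...
 *	dma_unmap_single(dev, mapping, len, DMA_TO_DEVICE);
 */
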
extern void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
	int nhwentries, enum dma_data_direction direction);
extern void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
	size_t size, enum dma_data_direction direction);
extern void dma_sync_single_for_device(struct device *dev,
	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction);
extern void dma_sync_single_range_for_cpu(struct device *dev,
	dma_addr_t dma_handle, unsigned long offset, size_t size,
	enum dma_data_direction direction);
extern void dma_sync_single_range_for_device(struct device *dev,
	dma_addr_t dma_handle, unsigned long offset, size_t size,
	enum dma_data_direction direction);
extern void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
	int nelems, enum dma_data_direction direction);
extern void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
	int nelems, enum dma_data_direction direction);
extern int dma_mapping_error(struct device *dev, dma_addr_t dma_addr);
extern int dma_supported(struct device *dev, u64 mask);

static inline int
dma_set_mask(struct device *dev, u64 mask)
{
	if (!dev->dma_mask || !dma_supported(dev, mask))
		return -EIO;

	*dev->dma_mask = mask;

	return 0;
}

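/*
 * Illustrative sketch only, not part of the exported interface: a probe
 * routine would usually negotiate the device's DMA mask before mapping
 * anything, falling back to 32-bit addressing when a wider mask is not
 * supported. DMA_BIT_MASK() is assumed from <linux/dma-mapping.h>; "pdev"
 * is a hypothetical PCI/platform device.
 *
 *	if (dma_set_mask(&pdev->dev, DMA_BIT_MASK(64)) &&
 *	    dma_set_mask(&pdev->dev, DMA_BIT_MASK(32)))
 *		return -EIO;
 */
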
static inline int
dma_get_cache_alignment(void)
{
	/* XXX Largest on any MIPS */
	return 128;
}

extern int dma_is_consistent(struct device *dev, dma_addr_t dma_addr);

extern void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
	       enum dma_data_direction direction);

#if 0
#define ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY

extern int dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
	dma_addr_t device_addr, size_t size, int flags);
extern void dma_release_declared_memory(struct device *dev);
extern void *dma_mark_declared_memory_occupied(struct device *dev,
	dma_addr_t device_addr, size_t size);
#endif

#endif /* _ASM_DMA_MAPPING_H */