#ifndef _ASM_GENERIC_DMA_MAPPING_H
#define _ASM_GENERIC_DMA_MAPPING_H

/* define the dma api to allow compilation but not linking of
 * dma dependent code. Code that depends on the dma-mapping
 * API needs to set 'depends on HAS_DMA' in its Kconfig
 */

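/*
 * For example, a driver that uses this API would declare the dependency
 * in its own (hypothetical) Kconfig entry roughly like so:
 *
 *	config FOO_DMA_DRIVER
 *		tristate "Example DMA-using driver"
 *		depends on HAS_DMA
 */
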
struct scatterlist;

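/*
 * Consistent ("coherent") allocations: dma_alloc_coherent() returns a CPU
 * virtual address and stores the matching bus address in *dma_handle;
 * dma_free_coherent() releases both.  See Documentation/DMA-API.txt.
 */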
extern void *
dma_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_handle,
		   gfp_t flag);

extern void
dma_free_coherent(struct device *dev, size_t size, void *cpu_addr,
		  dma_addr_t dma_handle);

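/* Non-coherent allocations simply fall back to the coherent ones here. */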
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

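/*
 * Streaming mappings of a single contiguous buffer.  The returned
 * dma_addr_t is handed to the device; the CPU should not touch the buffer
 * between map and unmap without syncing it first.
 */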
extern dma_addr_t
dma_map_single(struct device *dev, void *ptr, size_t size,
	       enum dma_data_direction direction);

extern void
dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
		 enum dma_data_direction direction);

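/*
 * Scatter/gather mappings: dma_map_sg() returns the number of DMA segments
 * actually mapped (possibly fewer than nents) or 0 on failure.
 * dma_unmap_sg() takes the nents originally passed to dma_map_sg(), not
 * the returned count.
 */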
extern int
dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
	   enum dma_data_direction direction);

extern void
dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries,
	     enum dma_data_direction direction);

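/*
 * Streaming mappings of (part of) a struct page, for buffers that may not
 * have a kernel virtual address (e.g. highmem pages).
 */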
extern dma_addr_t
dma_map_page(struct device *dev, struct page *page, unsigned long offset,
	     size_t size, enum dma_data_direction direction);

extern void
dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,
	       enum dma_data_direction direction);

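/*
 * Ownership transfer back to the CPU for streaming mappings: call one of
 * these before the CPU reads or writes a buffer that is still mapped.
 */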
extern void
dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, size_t size,
			enum dma_data_direction direction);

extern void
dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t dma_handle,
			      unsigned long offset, size_t size,
			      enum dma_data_direction direction);

extern void
dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems,
		    enum dma_data_direction direction);

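/* In this stub the *_for_device variants alias the *_for_cpu ones. */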
#define dma_sync_single_for_device dma_sync_single_for_cpu
#define dma_sync_single_range_for_device dma_sync_single_range_for_cpu
#define dma_sync_sg_for_device dma_sync_sg_for_cpu

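/*
 * dma_mapping_error() reports whether a dma_addr_t returned by one of the
 * map routines is actually an error cookie; check it before using the
 * address.
 */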
extern int
dma_mapping_error(struct device *dev, dma_addr_t dma_addr);

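/*
 * DMA addressing-capability helpers: dma_supported() queries whether the
 * device can address memory under the given mask, dma_set_mask() installs
 * the mask (returning 0 on success).
 */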
extern int
dma_supported(struct device *dev, u64 mask);

extern int
dma_set_mask(struct device *dev, u64 mask);

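/*
 * dma_get_cache_alignment() returns the alignment (in bytes) that DMA
 * buffers should have to avoid sharing cache lines with unrelated data.
 */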
extern int
dma_get_cache_alignment(void);

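/*
 * dma_cache_sync() flushes/invalidates CPU caches for a region obtained
 * with dma_alloc_noncoherent(), according to 'direction'.
 */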
extern void
dma_cache_sync(struct device *dev, void *vaddr, size_t size,
	       enum dma_data_direction direction);

#endif /* _ASM_GENERIC_DMA_MAPPING_H */