blob: 360b3387182af251713106cb304ac85ca78c5ca5 [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001#ifndef _ASM_DMA_MAPPING_H
2#define _ASM_DMA_MAPPING_H
3
Christoph Hellwig84be4562015-05-01 12:46:15 +02004#include <linux/scatterlist.h>
Steven J. Hillb6d92b42013-03-25 13:47:29 -05005#include <asm/dma-coherence.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -07006#include <asm/cache.h>
Yoichi Yuasaf8ac04252009-06-04 00:16:04 +09007#include <asm-generic/dma-coherent.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -07008
Ralf Baechle70342282013-01-22 12:59:30 +01009#ifndef CONFIG_SGI_IP27 /* Kludge to fix 2.6.39 build for IP27 */
David Daney48e1fd52010-10-01 13:27:32 -070010#include <dma-coherence.h>
Ralf Baechlea5602a32011-05-18 13:14:36 +010011#endif
Linus Torvalds1da177e2005-04-16 15:20:36 -070012
David Daney48e1fd52010-10-01 13:27:32 -070013extern struct dma_map_ops *mips_dma_map_ops;
Linus Torvalds1da177e2005-04-16 15:20:36 -070014
David Daney48e1fd52010-10-01 13:27:32 -070015static inline struct dma_map_ops *get_dma_ops(struct device *dev)
Atsushi Nemoto4f29c052009-01-23 00:42:11 +090016{
David Daney48e1fd52010-10-01 13:27:32 -070017 if (dev && dev->archdata.dma_ops)
18 return dev->archdata.dma_ops;
19 else
20 return mips_dma_map_ops;
Atsushi Nemoto4f29c052009-01-23 00:42:11 +090021}
22
David Daney48e1fd52010-10-01 13:27:32 -070023static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
24{
25 if (!dev->dma_mask)
Joe Perches3db27422015-03-30 16:46:03 -070026 return false;
David Daney48e1fd52010-10-01 13:27:32 -070027
28 return addr + size <= *dev->dma_mask;
29}
30
/* No-op on MIPS: nothing to do when a buffer becomes clean for DMA. */
static inline void dma_mark_clean(void *addr, size_t size)
{
}
32
33#include <asm-generic/dma-mapping-common.h>
34
35static inline int dma_supported(struct device *dev, u64 mask)
36{
37 struct dma_map_ops *ops = get_dma_ops(dev);
38 return ops->dma_supported(dev, mask);
39}
40
41static inline int dma_mapping_error(struct device *dev, u64 mask)
42{
43 struct dma_map_ops *ops = get_dma_ops(dev);
Shuah Khan9c83b07c2012-11-23 14:34:56 -070044
45 debug_dma_mapping_error(dev, mask);
David Daney48e1fd52010-10-01 13:27:32 -070046 return ops->mapping_error(dev, mask);
47}
Linus Torvalds1da177e2005-04-16 15:20:36 -070048
49static inline int
50dma_set_mask(struct device *dev, u64 mask)
51{
Huacai Chen1299b0e2014-03-21 18:44:06 +080052 struct dma_map_ops *ops = get_dma_ops(dev);
53
Linus Torvalds1da177e2005-04-16 15:20:36 -070054 if(!dev->dma_mask || !dma_supported(dev, mask))
55 return -EIO;
56
Huacai Chen1299b0e2014-03-21 18:44:06 +080057 if (ops->set_dma_mask)
58 return ops->set_dma_mask(dev, mask);
59
Linus Torvalds1da177e2005-04-16 15:20:36 -070060 *dev->dma_mask = mask;
61
62 return 0;
63}
64
Ralf Baechled3fa72e2006-12-06 20:38:56 -080065extern void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
Linus Torvalds1da177e2005-04-16 15:20:36 -070066 enum dma_data_direction direction);
67
Andrzej Pietrasiewicze8d51e52012-03-27 14:32:21 +020068#define dma_alloc_coherent(d,s,h,f) dma_alloc_attrs(d,s,h,f,NULL)
69
70static inline void *dma_alloc_attrs(struct device *dev, size_t size,
71 dma_addr_t *dma_handle, gfp_t gfp,
72 struct dma_attrs *attrs)
David Daney48e1fd52010-10-01 13:27:32 -070073{
74 void *ret;
75 struct dma_map_ops *ops = get_dma_ops(dev);
76
Andrzej Pietrasiewicze8d51e52012-03-27 14:32:21 +020077 ret = ops->alloc(dev, size, dma_handle, gfp, attrs);
David Daney48e1fd52010-10-01 13:27:32 -070078
79 debug_dma_alloc_coherent(dev, size, *dma_handle, ret);
80
81 return ret;
82}
83
Andrzej Pietrasiewicze8d51e52012-03-27 14:32:21 +020084#define dma_free_coherent(d,s,c,h) dma_free_attrs(d,s,c,h,NULL)
85
86static inline void dma_free_attrs(struct device *dev, size_t size,
87 void *vaddr, dma_addr_t dma_handle,
88 struct dma_attrs *attrs)
David Daney48e1fd52010-10-01 13:27:32 -070089{
90 struct dma_map_ops *ops = get_dma_ops(dev);
91
Andrzej Pietrasiewicze8d51e52012-03-27 14:32:21 +020092 ops->free(dev, size, vaddr, dma_handle, attrs);
David Daney48e1fd52010-10-01 13:27:32 -070093
94 debug_dma_free_coherent(dev, size, vaddr, dma_handle);
95}
96
97
98void *dma_alloc_noncoherent(struct device *dev, size_t size,
99 dma_addr_t *dma_handle, gfp_t flag);
100
101void dma_free_noncoherent(struct device *dev, size_t size,
102 void *vaddr, dma_addr_t dma_handle);
103
Linus Torvalds1da177e2005-04-16 15:20:36 -0700104#endif /* _ASM_DMA_MAPPING_H */