#ifndef _ASM_S390_DMA_MAPPING_H
#define _ASM_S390_DMA_MAPPING_H

#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/dma-attrs.h>
#include <linux/dma-debug.h>
#include <linux/io.h>

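/* An all-ones bus address never occurs as a valid mapping, so it serves as the error cookie. */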
#define DMA_ERROR_CODE		(~(dma_addr_t) 0x0)

extern struct dma_map_ops s390_dma_ops;

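/* s390 provides a single dma_map_ops implementation; every device uses s390_dma_ops. */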
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
	return &s390_dma_ops;
}

extern int dma_set_mask(struct device *dev, u64 mask);

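/* DMA on s390 is cache-coherent, so there is nothing to synchronize here. */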
static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
				  enum dma_data_direction direction)
{
}

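/* The "noncoherent" variants simply reuse the coherent allocations. */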
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

#include <asm-generic/dma-mapping-common.h>

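/* Report support unless the ops provide their own dma_supported() check. */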
static inline int dma_supported(struct device *dev, u64 mask)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	if (dma_ops->dma_supported == NULL)
		return 1;
	return dma_ops->dma_supported(dev, mask);
}

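/* A buffer is addressable if its last byte still fits under the device's DMA mask. */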
static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
	if (!dev->dma_mask)
		return false;
	return addr + size - 1 <= *dev->dma_mask;
}

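/* Prefer the ops' mapping_error() hook; otherwise compare against DMA_ERROR_CODE. */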
static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	debug_dma_mapping_error(dev, dma_addr);
	if (dma_ops->mapping_error)
		return dma_ops->mapping_error(dev, dma_addr);
	return dma_addr == DMA_ERROR_CODE;
}

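/* dma_alloc_coherent() is dma_alloc_attrs() with no attributes. */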
#define dma_alloc_coherent(d, s, h, f) dma_alloc_attrs(d, s, h, f, NULL)

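/* Allocate coherent memory via the ops and record the allocation for dma-debug. */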
static inline void *dma_alloc_attrs(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t flags,
				    struct dma_attrs *attrs)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	void *cpu_addr;

	BUG_ON(!ops);

	cpu_addr = ops->alloc(dev, size, dma_handle, flags, attrs);
	debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr);

	return cpu_addr;
}

#define dma_free_coherent(d, s, c, h) dma_free_attrs(d, s, c, h, NULL)

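/* Tell dma-debug about the release before handing the memory back to the ops. */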
static inline void dma_free_attrs(struct device *dev, size_t size,
				  void *cpu_addr, dma_addr_t dma_handle,
				  struct dma_attrs *attrs)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	BUG_ON(!ops);

	debug_dma_free_coherent(dev, size, cpu_addr, dma_handle);
	ops->free(dev, size, cpu_addr, dma_handle, attrs);
}

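/*
 * Typical driver usage (illustrative sketch only; "mydev" and "size"
 * are placeholders, not part of this header):
 *
 *	dma_addr_t bus_addr;
 *	void *cpu_addr;
 *
 *	cpu_addr = dma_alloc_coherent(mydev, size, &bus_addr, GFP_KERNEL);
 *	if (!cpu_addr)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(mydev, size, cpu_addr, bus_addr);
 */
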
#endif /* _ASM_S390_DMA_MAPPING_H */