blob: 6eb1ee548b11edeee7df508f9297e5d682db2547 [file] [log] [blame]
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2006 Ralf Baechle <ralf@linux-mips.org>
 *
 * Similar to mach-generic/dma-coherence.h except
 * plat_device_is_coherent hard coded to return 1.
 */
13#ifndef __ASM_MACH_CAVIUM_OCTEON_DMA_COHERENCE_H
14#define __ASM_MACH_CAVIUM_OCTEON_DMA_COHERENCE_H
15
David Daney368b0622013-06-20 16:10:50 +020016#include <linux/bug.h>
17
David Daney5b3b1682009-01-08 16:46:40 -080018struct device;
19
David Daneyb93b2ab2010-10-01 13:27:34 -070020extern void octeon_pci_dma_init(void);
David Daney5b3b1682009-01-08 16:46:40 -080021
/*
 * Legacy virtual-address -> DMA-address hook. On Octeon the translation is
 * provided by the platform DMA ops (see octeon_pci_dma_map_ops below), so
 * this path must never be reached; BUG() if it is.
 */
static inline dma_addr_t plat_map_dma_mem(struct device *dev, void *addr,
	size_t size)
{
	BUG();
	return 0;	/* unreachable; satisfies the non-void return */
}
28
/*
 * Legacy page -> DMA-address hook. Never used on Octeon (the platform DMA
 * ops handle mapping); BUG() if called.
 */
static inline dma_addr_t plat_map_dma_mem_page(struct device *dev,
	struct page *page)
{
	BUG();
	return 0;	/* unreachable; satisfies the non-void return */
}
35
Kevin Cernekee3807ef3f62009-04-23 17:25:12 -070036static inline unsigned long plat_dma_addr_to_phys(struct device *dev,
37 dma_addr_t dma_addr)
David Daney5b3b1682009-01-08 16:46:40 -080038{
David Daneyb93b2ab2010-10-01 13:27:34 -070039 BUG();
David Daney368b0622013-06-20 16:10:50 +020040 return 0;
David Daney5b3b1682009-01-08 16:46:40 -080041}
42
Kevin Cernekeed3f634b2009-04-23 17:03:43 -070043static inline void plat_unmap_dma_mem(struct device *dev, dma_addr_t dma_addr,
44 size_t size, enum dma_data_direction direction)
David Daney5b3b1682009-01-08 16:46:40 -080045{
David Daneyb93b2ab2010-10-01 13:27:34 -070046 BUG();
David Daney5b3b1682009-01-08 16:46:40 -080047}
48
/*
 * Legacy dma_supported() hook. The platform DMA ops answer mask queries on
 * Octeon, so this stub must never be reached; BUG() if it is.
 */
static inline int plat_dma_supported(struct device *dev, u64 mask)
{
	BUG();
	return 0;	/* unreachable; satisfies the non-void return */
}
54
David Daney5b3b1682009-01-08 16:46:40 -080055static inline int plat_device_is_coherent(struct device *dev)
56{
57 return 1;
58}
59
/*
 * Post-DMA cache flush hook: intentionally empty, since
 * plat_device_is_coherent() above reports all devices as coherent and no
 * flush is needed.
 */
static inline void plat_post_dma_flush(struct device *dev)
{
}
63
Christoph Hellwigea8c64a2018-01-10 16:21:13 +010064static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
65{
66 if (!dev->dma_mask)
67 return false;
68
69 return addr + size - 1 <= *dev->dma_mask;
70}
71
Christoph Hellwigb6e05472018-03-19 11:38:24 +010072dma_addr_t __phys_to_dma(struct device *dev, phys_addr_t paddr);
73phys_addr_t __dma_to_phys(struct device *dev, dma_addr_t daddr);
David Daneyb93b2ab2010-10-01 13:27:34 -070074
75struct dma_map_ops;
Bart Van Assche52997092017-01-20 13:04:01 -080076extern const struct dma_map_ops *octeon_pci_dma_map_ops;
David Daneyb93b2ab2010-10-01 13:27:34 -070077extern char *octeon_swiotlb;
78
David Daney5b3b1682009-01-08 16:46:40 -080079#endif /* __ASM_MACH_CAVIUM_OCTEON_DMA_COHERENCE_H */