blob: aab4fd681e1f06b65b7a0f9e8f44f39dd1a85ba1 [file] [log] [blame]
Huacai Chen1299b0e2014-03-21 18:44:06 +08001#include <linux/mm.h>
2#include <linux/init.h>
3#include <linux/dma-mapping.h>
4#include <linux/scatterlist.h>
5#include <linux/swiotlb.h>
6#include <linux/bootmem.h>
7
8#include <asm/bootinfo.h>
9#include <boot_param.h>
10#include <dma-coherence.h>
11
/*
 * Allocate a coherent DMA buffer through swiotlb.
 *
 * The caller's GFP zone specifiers are discarded and re-derived from the
 * device's coherent_dma_mask so the buffer lands in a zone the device can
 * actually address.  Returns the kernel virtual address and fills in
 * *dma_handle with the bus address.
 */
static void *loongson_dma_alloc_coherent(struct device *dev, size_t size,
		dma_addr_t *dma_handle, gfp_t gfp, unsigned long attrs)
{
	void *ret;

	/* ignore region specifiers */
	gfp &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM);

	/*
	 * The three #ifdef sections below splice together into ONE
	 * if/else-if cascade, so at most one zone flag is set.  The bare
	 * ';' at the end is the empty statement consumed by the last
	 * dangling 'else' — do not remove it.
	 */
#ifdef CONFIG_ISA
	if (dev == NULL)
		gfp |= __GFP_DMA;
	else
#endif
#ifdef CONFIG_ZONE_DMA
	if (dev->coherent_dma_mask < DMA_BIT_MASK(32))
		gfp |= __GFP_DMA;
	else
#endif
#ifdef CONFIG_ZONE_DMA32
	if (dev->coherent_dma_mask < DMA_BIT_MASK(40))
		gfp |= __GFP_DMA32;
	else
#endif
	;
	/* Fail fast rather than retrying/triggering the OOM killer */
	gfp |= __GFP_NORETRY;

	ret = swiotlb_alloc_coherent(dev, size, dma_handle, gfp);
	/* NOTE(review): presumably drains the CPU write buffer before the
	 * buffer address is handed to the caller/device — confirm against
	 * Loongson-3 ordering requirements */
	mb();
	return ret;
}
42
/*
 * Free a buffer obtained from loongson_dma_alloc_coherent().
 * Pure delegation to the swiotlb backend; @attrs is accepted for the
 * dma_map_ops signature but unused here.
 */
static void loongson_dma_free_coherent(struct device *dev, size_t size,
		void *vaddr, dma_addr_t dma_handle, unsigned long attrs)
{
	swiotlb_free_coherent(dev, size, vaddr, dma_handle);
}
48
49static dma_addr_t loongson_dma_map_page(struct device *dev, struct page *page,
50 unsigned long offset, size_t size,
51 enum dma_data_direction dir,
Krzysztof Kozlowski00085f12016-08-03 13:46:00 -070052 unsigned long attrs)
Huacai Chen1299b0e2014-03-21 18:44:06 +080053{
54 dma_addr_t daddr = swiotlb_map_page(dev, page, offset, size,
55 dir, attrs);
56 mb();
57 return daddr;
58}
59
60static int loongson_dma_map_sg(struct device *dev, struct scatterlist *sg,
61 int nents, enum dma_data_direction dir,
Krzysztof Kozlowski00085f12016-08-03 13:46:00 -070062 unsigned long attrs)
Huacai Chen1299b0e2014-03-21 18:44:06 +080063{
Alexander Duyck9f318d42016-12-14 15:04:58 -080064 int r = swiotlb_map_sg_attrs(dev, sg, nents, dir, attrs);
Huacai Chen1299b0e2014-03-21 18:44:06 +080065 mb();
66
67 return r;
68}
69
/*
 * Hand a streaming mapping back to the device after CPU access.
 * Delegates to swiotlb (which handles any bounce-buffer copy), then
 * issues a full barrier.  NOTE(review): the mb() presumably flushes the
 * write buffer so the device sees the synced data — confirm against
 * Loongson-3 ordering rules.
 */
static void loongson_dma_sync_single_for_device(struct device *dev,
				dma_addr_t dma_handle, size_t size,
				enum dma_data_direction dir)
{
	swiotlb_sync_single_for_device(dev, dma_handle, size, dir);
	mb();
}
77
/*
 * Scatter-gather counterpart of loongson_dma_sync_single_for_device():
 * sync every entry via swiotlb, then barrier so the device observes the
 * CPU's writes.
 */
static void loongson_dma_sync_sg_for_device(struct device *dev,
				struct scatterlist *sg, int nents,
				enum dma_data_direction dir)
{
	swiotlb_sync_sg_for_device(dev, sg, nents, dir);
	mb();
}
85
86static int loongson_dma_set_mask(struct device *dev, u64 mask)
87{
Christoph Hellwig452e06a2015-09-09 15:39:53 -070088 if (!dev->dma_mask || !dma_supported(dev, mask))
89 return -EIO;
90
Huacai Chen1299b0e2014-03-21 18:44:06 +080091 if (mask > DMA_BIT_MASK(loongson_sysconf.dma_mask_bits)) {
92 *dev->dma_mask = DMA_BIT_MASK(loongson_sysconf.dma_mask_bits);
93 return -EIO;
94 }
95
96 *dev->dma_mask = mask;
97
98 return 0;
99}
100
101dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
102{
Huacai Chenf4906822014-11-04 14:13:24 +0800103 long nid;
104#ifdef CONFIG_PHYS48_TO_HT40
105 /* We extract 2bit node id (bit 44~47, only bit 44~45 used now) from
106 * Loongson-3's 48bit address space and embed it into 40bit */
107 nid = (paddr >> 44) & 0x3;
108 paddr = ((nid << 44) ^ paddr) | (nid << 37);
109#endif
Huacai Chen1299b0e2014-03-21 18:44:06 +0800110 return paddr;
111}
112
113phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
114{
Huacai Chenf4906822014-11-04 14:13:24 +0800115 long nid;
116#ifdef CONFIG_PHYS48_TO_HT40
117 /* We extract 2bit node id (bit 44~47, only bit 44~45 used now) from
118 * Loongson-3's 48bit address space and embed it into 40bit */
119 nid = (daddr >> 37) & 0x3;
120 daddr = ((nid << 37) ^ daddr) | (nid << 44);
121#endif
Huacai Chen1299b0e2014-03-21 18:44:06 +0800122 return daddr;
123}
124
/*
 * Loongson DMA operations: swiotlb with extra mb() barriers on every
 * path that hands data to the device (alloc, map, sync-for-device).
 * CPU-direction paths use the stock swiotlb helpers unchanged.
 */
static struct dma_map_ops loongson_dma_map_ops = {
	.alloc = loongson_dma_alloc_coherent,
	.free = loongson_dma_free_coherent,
	.map_page = loongson_dma_map_page,		/* mb()-wrapped */
	.unmap_page = swiotlb_unmap_page,
	.map_sg = loongson_dma_map_sg,			/* mb()-wrapped */
	.unmap_sg = swiotlb_unmap_sg_attrs,
	.sync_single_for_cpu = swiotlb_sync_single_for_cpu,
	.sync_single_for_device = loongson_dma_sync_single_for_device,
	.sync_sg_for_cpu = swiotlb_sync_sg_for_cpu,
	.sync_sg_for_device = loongson_dma_sync_sg_for_device,
	.mapping_error = swiotlb_dma_mapping_error,
	.dma_supported = swiotlb_dma_supported,
	.set_dma_mask = loongson_dma_set_mask
};
140
/*
 * Boot-time hook: initialize the swiotlb bounce-buffer pool and install
 * the Loongson DMA operations as the platform-wide mips_dma_map_ops.
 */
void __init plat_swiotlb_setup(void)
{
	swiotlb_init(1);	/* 1: verbose — presumably prints swiotlb info; confirm */
	mips_dma_map_ops = &loongson_dma_map_ops;
}