#include <linux/mm.h>
#include <linux/init.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/swiotlb.h>
#include <linux/bootmem.h>

#include <asm/bootinfo.h>
#include <boot_param.h>
#include <dma-coherence.h>

static void *loongson_dma_alloc_coherent(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t gfp, struct dma_attrs *attrs)
{
	void *ret;

	/* ignore region specifiers */
	gfp &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM);

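	/*
	 * Pick a GFP zone that satisfies the device's coherent DMA mask:
	 * legacy ISA devices get ZONE_DMA, masks narrower than 32 bits get
	 * ZONE_DMA, masks narrower than 40 bits get ZONE_DMA32.  The chained
	 * #ifdef/if/else below falls through to the bare ';' when no zone
	 * restriction applies.
	 */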
#ifdef CONFIG_ISA
	if (dev == NULL)
		gfp |= __GFP_DMA;
	else
#endif
#ifdef CONFIG_ZONE_DMA
	if (dev->coherent_dma_mask < DMA_BIT_MASK(32))
		gfp |= __GFP_DMA;
	else
#endif
#ifdef CONFIG_ZONE_DMA32
	if (dev->coherent_dma_mask < DMA_BIT_MASK(40))
		gfp |= __GFP_DMA32;
	else
#endif
	;
	gfp |= __GFP_NORETRY;

	ret = swiotlb_alloc_coherent(dev, size, dma_handle, gfp);
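	/*
	 * Full memory barrier before the buffer is handed back to the
	 * caller, matching the map and sync-for-device paths below.
	 */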
	mb();
	return ret;
}

static void loongson_dma_free_coherent(struct device *dev, size_t size,
		void *vaddr, dma_addr_t dma_handle, struct dma_attrs *attrs)
{
	swiotlb_free_coherent(dev, size, vaddr, dma_handle);
}

static dma_addr_t loongson_dma_map_page(struct device *dev, struct page *page,
				unsigned long offset, size_t size,
				enum dma_data_direction dir,
				struct dma_attrs *attrs)
{
	dma_addr_t daddr = swiotlb_map_page(dev, page, offset, size,
					dir, attrs);
	mb();
	return daddr;
}

static int loongson_dma_map_sg(struct device *dev, struct scatterlist *sg,
				int nents, enum dma_data_direction dir,
				struct dma_attrs *attrs)
{
	int r = swiotlb_map_sg_attrs(dev, sg, nents, dir, NULL);
	mb();

	return r;
}

static void loongson_dma_sync_single_for_device(struct device *dev,
				dma_addr_t dma_handle, size_t size,
				enum dma_data_direction dir)
{
	swiotlb_sync_single_for_device(dev, dma_handle, size, dir);
	mb();
}

static void loongson_dma_sync_sg_for_device(struct device *dev,
				struct scatterlist *sg, int nents,
				enum dma_data_direction dir)
{
	swiotlb_sync_sg_for_device(dev, sg, nents, dir);
	mb();
}

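/*
 * Clamp the requested mask to what the platform reports
 * (loongson_sysconf.dma_mask_bits) and return -EIO if the caller asked
 * for a wider mask than that.
 */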
static int loongson_dma_set_mask(struct device *dev, u64 mask)
{
	if (mask > DMA_BIT_MASK(loongson_sysconf.dma_mask_bits)) {
		*dev->dma_mask = DMA_BIT_MASK(loongson_sysconf.dma_mask_bits);
		return -EIO;
	}

	*dev->dma_mask = mask;

	return 0;
}

dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	long nid;
#ifdef CONFIG_PHYS48_TO_HT40
	/* We extract the 2-bit node id (bits 44~47, only bits 44~45 are used
	 * now) from Loongson-3's 48-bit physical address space and embed it
	 * into bits 37~38 of the 40-bit HT address. */
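	/* For illustration (values derived from the shifts below): physical
	 * address 0x1000_1234_5678 on node 1 becomes DMA address
	 * 0x0020_1234_5678. */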
	nid = (paddr >> 44) & 0x3;
	paddr = ((nid << 44) ^ paddr) | (nid << 37);
#endif
	return paddr;
}

phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
	long nid;
#ifdef CONFIG_PHYS48_TO_HT40
	/* Reverse of phys_to_dma(): move the 2-bit node id from bits 37~38 of
	 * the 40-bit HT address back to bits 44~45 of Loongson-3's 48-bit
	 * physical address space. */
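	/* For illustration (the inverse of the example in phys_to_dma()):
	 * DMA address 0x0020_1234_5678 maps back to physical address
	 * 0x1000_1234_5678. */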
	nid = (daddr >> 37) & 0x3;
	daddr = ((nid << 37) ^ daddr) | (nid << 44);
#endif
	return daddr;
}

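/*
 * Paths that hand memory to the device (alloc, map, sync_*_for_device) go
 * through the wrappers above, which add a memory barrier; the CPU-bound
 * unmap and sync_*_for_cpu paths use the swiotlb helpers directly.
 */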
static struct dma_map_ops loongson_dma_map_ops = {
	.alloc = loongson_dma_alloc_coherent,
	.free = loongson_dma_free_coherent,
	.map_page = loongson_dma_map_page,
	.unmap_page = swiotlb_unmap_page,
	.map_sg = loongson_dma_map_sg,
	.unmap_sg = swiotlb_unmap_sg_attrs,
	.sync_single_for_cpu = swiotlb_sync_single_for_cpu,
	.sync_single_for_device = loongson_dma_sync_single_for_device,
	.sync_sg_for_cpu = swiotlb_sync_sg_for_cpu,
	.sync_sg_for_device = loongson_dma_sync_sg_for_device,
	.mapping_error = swiotlb_dma_mapping_error,
	.dma_supported = swiotlb_dma_supported,
	.set_dma_mask = loongson_dma_set_mask
};

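/*
 * Boot-time hook: initialise the swiotlb bounce buffers and install the
 * ops above as the platform-wide mips_dma_map_ops.
 */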
void __init plat_swiotlb_setup(void)
{
	swiotlb_init(1);
	mips_dma_map_ops = &loongson_dma_map_ops;
}