Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 1 | /* |
| 2 | * drivers/staging/android/ion/ion_cma_heap.c |
| 3 | * |
| 4 | * Copyright (C) Linaro 2012 |
| 5 | * Author: <benjamin.gaignard@linaro.org> for ST-Ericsson. |
| 6 | * |
| 7 | * This software is licensed under the terms of the GNU General Public |
| 8 | * License version 2, as published by the Free Software Foundation, and |
| 9 | * may be copied, distributed, and modified under those terms. |
| 10 | * |
| 11 | * This program is distributed in the hope that it will be useful, |
| 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
| 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
| 14 | * GNU General Public License for more details. |
| 15 | * |
| 16 | */ |
| 17 | |
| 18 | #include <linux/device.h> |
| 19 | #include <linux/slab.h> |
| 20 | #include <linux/errno.h> |
| 21 | #include <linux/err.h> |
| 22 | #include <linux/dma-mapping.h> |
| 23 | |
| 24 | #include "ion.h" |
| 25 | #include "ion_priv.h" |
| 26 | |
| 27 | #define ION_CMA_ALLOCATE_FAILED -1 |
| 28 | |
/*
 * struct ion_cma_heap - ION heap backed by a CMA reserved region
 * @heap: embedded generic ION heap; to_cma_heap() recovers the container
 * @dev:  device whose CMA area satisfies allocations (taken from
 *        ion_platform_heap->priv in ion_cma_heap_create())
 */
struct ion_cma_heap {
	struct ion_heap heap;
	struct device *dev;
};

/* recover the containing ion_cma_heap from its embedded generic heap */
#define to_cma_heap(x) container_of(x, struct ion_cma_heap, heap)
| 35 | |
/*
 * struct ion_cma_buffer_info - per-buffer bookkeeping, stored in
 * ion_buffer->priv_virt by ion_cma_allocate() and released in ion_cma_free()
 * @cpu_addr: kernel virtual address returned by dma_alloc_coherent()
 * @handle:   bus/DMA address of the allocation
 * @table:    scatter/gather table describing the buffer (dma_get_sgtable())
 */
struct ion_cma_buffer_info {
	void *cpu_addr;
	dma_addr_t handle;
	struct sg_table *table;
};
| 41 | |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 42 | |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 43 | /* ION CMA heap operations functions */ |
| 44 | static int ion_cma_allocate(struct ion_heap *heap, struct ion_buffer *buffer, |
| 45 | unsigned long len, unsigned long align, |
| 46 | unsigned long flags) |
| 47 | { |
| 48 | struct ion_cma_heap *cma_heap = to_cma_heap(heap); |
| 49 | struct device *dev = cma_heap->dev; |
| 50 | struct ion_cma_buffer_info *info; |
| 51 | |
| 52 | dev_dbg(dev, "Request buffer allocation len %ld\n", len); |
| 53 | |
Colin Cross | 661f82f | 2013-12-13 19:26:32 -0800 | [diff] [blame] | 54 | if (buffer->flags & ION_FLAG_CACHED) |
| 55 | return -EINVAL; |
| 56 | |
| 57 | if (align > PAGE_SIZE) |
| 58 | return -EINVAL; |
| 59 | |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 60 | info = kzalloc(sizeof(struct ion_cma_buffer_info), GFP_KERNEL); |
Phong Tran | f0ca3e8 | 2014-07-20 11:10:55 +0700 | [diff] [blame] | 61 | if (!info) |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 62 | return ION_CMA_ALLOCATE_FAILED; |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 63 | |
Colin Cross | 661f82f | 2013-12-13 19:26:32 -0800 | [diff] [blame] | 64 | info->cpu_addr = dma_alloc_coherent(dev, len, &(info->handle), |
| 65 | GFP_HIGHUSER | __GFP_ZERO); |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 66 | |
| 67 | if (!info->cpu_addr) { |
| 68 | dev_err(dev, "Fail to allocate buffer\n"); |
| 69 | goto err; |
| 70 | } |
| 71 | |
| 72 | info->table = kmalloc(sizeof(struct sg_table), GFP_KERNEL); |
Phong Tran | f0ca3e8 | 2014-07-20 11:10:55 +0700 | [diff] [blame] | 73 | if (!info->table) |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 74 | goto free_mem; |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 75 | |
Jignesh R Patel | 936d61e | 2015-07-28 16:19:36 +0530 | [diff] [blame] | 76 | if (dma_get_sgtable(dev, info->table, info->cpu_addr, info->handle, |
| 77 | len)) |
Rebecca Schultz Zavin | c13bd1c | 2013-12-13 14:24:45 -0800 | [diff] [blame] | 78 | goto free_table; |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 79 | /* keep this for memory release */ |
| 80 | buffer->priv_virt = info; |
Laura Abbott | f82ad60 | 2016-08-08 09:52:56 -0700 | [diff] [blame] | 81 | buffer->sg_table = info->table; |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 82 | dev_dbg(dev, "Allocate buffer %p\n", buffer); |
| 83 | return 0; |
| 84 | |
| 85 | free_table: |
| 86 | kfree(info->table); |
| 87 | free_mem: |
| 88 | dma_free_coherent(dev, len, info->cpu_addr, info->handle); |
| 89 | err: |
| 90 | kfree(info); |
| 91 | return ION_CMA_ALLOCATE_FAILED; |
| 92 | } |
| 93 | |
| 94 | static void ion_cma_free(struct ion_buffer *buffer) |
| 95 | { |
| 96 | struct ion_cma_heap *cma_heap = to_cma_heap(buffer->heap); |
| 97 | struct device *dev = cma_heap->dev; |
| 98 | struct ion_cma_buffer_info *info = buffer->priv_virt; |
| 99 | |
| 100 | dev_dbg(dev, "Release buffer %p\n", buffer); |
| 101 | /* release memory */ |
| 102 | dma_free_coherent(dev, buffer->size, info->cpu_addr, info->handle); |
| 103 | /* release sg table */ |
| 104 | sg_free_table(info->table); |
| 105 | kfree(info->table); |
| 106 | kfree(info); |
| 107 | } |
| 108 | |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 109 | static int ion_cma_mmap(struct ion_heap *mapper, struct ion_buffer *buffer, |
| 110 | struct vm_area_struct *vma) |
| 111 | { |
| 112 | struct ion_cma_heap *cma_heap = to_cma_heap(buffer->heap); |
| 113 | struct device *dev = cma_heap->dev; |
| 114 | struct ion_cma_buffer_info *info = buffer->priv_virt; |
| 115 | |
| 116 | return dma_mmap_coherent(dev, vma, info->cpu_addr, info->handle, |
| 117 | buffer->size); |
| 118 | } |
| 119 | |
Colin Cross | f63958d | 2013-12-13 19:26:28 -0800 | [diff] [blame] | 120 | static void *ion_cma_map_kernel(struct ion_heap *heap, |
| 121 | struct ion_buffer *buffer) |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 122 | { |
| 123 | struct ion_cma_buffer_info *info = buffer->priv_virt; |
| 124 | /* kernel memory mapping has been done at allocation time */ |
| 125 | return info->cpu_addr; |
| 126 | } |
| 127 | |
static void ion_cma_unmap_kernel(struct ion_heap *heap,
				 struct ion_buffer *buffer)
{
	/*
	 * Intentionally empty: ion_cma_map_kernel() only returns the
	 * mapping created by dma_alloc_coherent(), which stays valid
	 * until the buffer is released in ion_cma_free().
	 */
}
| 132 | |
/* heap callbacks registered with the ION core for DMA/CMA heaps */
static struct ion_heap_ops ion_cma_ops = {
	.allocate = ion_cma_allocate,
	.free = ion_cma_free,
	.map_user = ion_cma_mmap,
	.map_kernel = ion_cma_map_kernel,
	.unmap_kernel = ion_cma_unmap_kernel,
};
| 140 | |
| 141 | struct ion_heap *ion_cma_heap_create(struct ion_platform_heap *data) |
| 142 | { |
| 143 | struct ion_cma_heap *cma_heap; |
| 144 | |
| 145 | cma_heap = kzalloc(sizeof(struct ion_cma_heap), GFP_KERNEL); |
| 146 | |
| 147 | if (!cma_heap) |
| 148 | return ERR_PTR(-ENOMEM); |
| 149 | |
| 150 | cma_heap->heap.ops = &ion_cma_ops; |
Sriram Raghunathan | 7e41617 | 2015-09-22 22:35:51 +0530 | [diff] [blame] | 151 | /* |
| 152 | * get device from private heaps data, later it will be |
| 153 | * used to make the link with reserved CMA memory |
| 154 | */ |
Benjamin Gaignard | 349c9e1 | 2013-12-13 14:24:44 -0800 | [diff] [blame] | 155 | cma_heap->dev = data->priv; |
| 156 | cma_heap->heap.type = ION_HEAP_TYPE_DMA; |
| 157 | return &cma_heap->heap; |
| 158 | } |
| 159 | |
/* Free a heap previously created by ion_cma_heap_create(). */
void ion_cma_heap_destroy(struct ion_heap *heap)
{
	kfree(to_cma_heap(heap));
}