Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2020 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #define LOG_TAG "DMABUFHEAPS" |
| 18 | |
| 19 | #include <BufferAllocator/BufferAllocator.h> |
| 20 | |
| 21 | #include <errno.h> |
| 22 | #include <fcntl.h> |
| 23 | #include <ion/ion.h> |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 24 | #include <linux/dma-buf.h> |
Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 25 | #include <linux/dma-heap.h> |
| 26 | #include <linux/ion_4.12.h> |
| 27 | #include <stdlib.h> |
| 28 | #include <sys/types.h> |
| 29 | #include <unistd.h> |
| 30 | |
Hridya Valsaraju | cff13da | 2021-03-12 17:01:01 -0800 | [diff] [blame] | 31 | #include <shared_mutex> |
Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 32 | #include <string> |
Hridya Valsaraju | ac7673b | 2021-02-02 11:05:27 -0800 | [diff] [blame] | 33 | #include <unordered_set> |
Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 34 | |
| 35 | #include <android-base/logging.h> |
| 36 | #include <android-base/unique_fd.h> |
| 37 | |
| 38 | static constexpr char kDmaHeapRoot[] = "/dev/dma_heap/"; |
| 39 | static constexpr char kIonDevice[] = "/dev/ion"; |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 40 | static constexpr char kIonSystemHeapName[] = "ion_system_heap"; |
| 41 | |
| 42 | void BufferAllocator::LogInterface(const std::string& interface) { |
| 43 | if (!logged_interface_) { |
| 44 | LOG(INFO) << "Using : " << interface; |
| 45 | logged_interface_ = true; |
| 46 | } |
| 47 | } |
Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 48 | |
/*
 * Returns a file descriptor for the DMA-BUF heap named |heap_name|, opening
 * /dev/dma_heap/<heap_name> on first use and caching the fd for later calls.
 * Returns -errno if the heap device cannot be opened.
 *
 * Uses a double-checked lookup: a shared lock for the common already-cached
 * case, and a unique lock only when the fd table must be modified.
 */
int BufferAllocator::OpenDmabufHeap(const std::string& heap_name) {
    std::shared_lock<std::shared_mutex> slock(dmabuf_heap_fd_mutex_);

    /* Check if heap has already been opened. */
    auto it = dmabuf_heap_fds_.find(heap_name);
    if (it != dmabuf_heap_fds_.end())
        return it->second;

    slock.unlock();

    /*
     * Heap device needs to be opened, use a unique_lock since dmabuf_heap_fd_
     * needs to be modified.
     */
    std::unique_lock<std::shared_mutex> ulock(dmabuf_heap_fd_mutex_);

    /*
     * Check if we already opened this heap again to prevent racing threads from
     * opening the heap device multiple times.
     */
    it = dmabuf_heap_fds_.find(heap_name);
    if (it != dmabuf_heap_fds_.end()) return it->second;

    std::string heap_path = kDmaHeapRoot + heap_name;
    int fd = TEMP_FAILURE_RETRY(open(heap_path.c_str(), O_RDONLY | O_CLOEXEC));
    if (fd < 0) return -errno;

    LOG(INFO) << "Using DMA-BUF heap named: " << heap_name;

    /* The map's unique_fd takes ownership; the raw fd stays valid for callers
     * for as long as this BufferAllocator lives. */
    auto ret = dmabuf_heap_fds_.insert({heap_name, android::base::unique_fd(fd)});
    CHECK(ret.second);
    return fd;
}
| 82 | |
| 83 | void BufferAllocator::QueryIonHeaps() { |
| 84 | uses_legacy_ion_iface_ = ion_is_legacy(ion_fd_); |
| 85 | if (uses_legacy_ion_iface_) { |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 86 | LogInterface("Legacy ion heaps"); |
Hridya Valsaraju | f958e85 | 2021-01-21 15:00:42 -0800 | [diff] [blame] | 87 | MapNameToIonMask(kDmabufSystemHeapName, ION_HEAP_SYSTEM_MASK, ION_FLAG_CACHED); |
| 88 | MapNameToIonMask(kDmabufSystemUncachedHeapName, ION_HEAP_SYSTEM_MASK); |
Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 89 | return; |
| 90 | } |
| 91 | |
| 92 | int heap_count; |
| 93 | int ret = ion_query_heap_cnt(ion_fd_, &heap_count); |
| 94 | if (ret == 0) { |
| 95 | ion_heap_info_.resize(heap_count, {}); |
| 96 | ret = ion_query_get_heaps(ion_fd_, heap_count, ion_heap_info_.data()); |
| 97 | } |
| 98 | |
| 99 | // Abort if heap query fails |
| 100 | CHECK(ret == 0) |
| 101 | << "Non-legacy ION implementation must support heap information queries"; |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 102 | LogInterface("Non-legacy ION heaps"); |
| 103 | |
| 104 | /* |
| 105 | * No error checking here, it is possible that devices may have used another name for |
| 106 | * the ion system heap. |
| 107 | */ |
Hridya Valsaraju | f958e85 | 2021-01-21 15:00:42 -0800 | [diff] [blame] | 108 | MapNameToIonName(kDmabufSystemHeapName, kIonSystemHeapName, ION_FLAG_CACHED); |
| 109 | MapNameToIonName(kDmabufSystemUncachedHeapName, kIonSystemHeapName); |
Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 110 | } |
| 111 | |
| 112 | BufferAllocator::BufferAllocator() { |
Hridya Valsaraju | ac059d8 | 2020-10-28 15:17:32 -0700 | [diff] [blame] | 113 | ion_fd_.reset(TEMP_FAILURE_RETRY(open(kIonDevice, O_RDONLY| O_CLOEXEC))); |
| 114 | if (ion_fd_ >= 0) |
Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 115 | QueryIonHeaps(); |
Hridya Valsaraju | 0a83387 | 2020-05-13 10:43:30 -0700 | [diff] [blame] | 116 | } |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 117 | |
| 118 | int BufferAllocator::MapNameToIonMask(const std::string& heap_name, unsigned int ion_heap_mask, |
| 119 | unsigned int ion_heap_flags) { |
| 120 | if (!ion_heap_mask) |
| 121 | return -EINVAL; |
| 122 | IonHeapConfig heap_config = { ion_heap_mask, ion_heap_flags }; |
Hridya Valsaraju | cff13da | 2021-03-12 17:01:01 -0800 | [diff] [blame] | 123 | |
| 124 | std::unique_lock<std::shared_mutex> ulock(heap_name_to_config_mutex_); |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 125 | heap_name_to_config_[heap_name] = heap_config; |
| 126 | return 0; |
| 127 | } |
| 128 | |
| 129 | int BufferAllocator::GetIonHeapIdByName(const std::string& heap_name, unsigned int* heap_id) { |
| 130 | for (auto& it : ion_heap_info_) { |
| 131 | if (heap_name == it.name) { |
| 132 | *heap_id = it.heap_id; |
| 133 | return 0; |
| 134 | } |
| 135 | } |
| 136 | |
| 137 | LOG(ERROR) << "No ion heap of name " << heap_name << " exists"; |
| 138 | return -EINVAL; |
| 139 | } |
| 140 | |
| 141 | int BufferAllocator::MapNameToIonName(const std::string& heap_name, |
| 142 | const std::string& ion_heap_name, |
| 143 | unsigned int ion_heap_flags) { |
| 144 | unsigned int ion_heap_id = 0; |
| 145 | auto ret = GetIonHeapIdByName(ion_heap_name, &ion_heap_id); |
| 146 | if (ret < 0) |
| 147 | return ret; |
| 148 | |
| 149 | unsigned int ion_heap_mask = 1 << ion_heap_id; |
| 150 | IonHeapConfig heap_config = { ion_heap_mask, ion_heap_flags }; |
Hridya Valsaraju | cff13da | 2021-03-12 17:01:01 -0800 | [diff] [blame] | 151 | |
| 152 | std::unique_lock<std::shared_mutex> ulock(heap_name_to_config_mutex_); |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 153 | heap_name_to_config_[heap_name] = heap_config; |
| 154 | |
| 155 | return 0; |
| 156 | } |
| 157 | |
| 158 | int BufferAllocator::MapNameToIonHeap(const std::string& heap_name, |
| 159 | const std::string& ion_heap_name, |
| 160 | unsigned int ion_heap_flags, |
| 161 | unsigned int legacy_ion_heap_mask, |
| 162 | unsigned int legacy_ion_heap_flags) { |
John Stultz | df362cd | 2020-11-11 06:29:06 +0000 | [diff] [blame] | 163 | /* if the DMA-BUF Heap exists, we can ignore ion mappings */ |
| 164 | int ret = OpenDmabufHeap(heap_name); |
| 165 | if (ret >= 0) |
| 166 | return 0; |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 167 | |
Hridya Valsaraju | 0f2cc9e | 2021-08-11 11:15:12 -0700 | [diff] [blame] | 168 | /* If ION support is not detected, ignore the mappings */ |
| 169 | if (ion_fd_ < 0) return 0; |
| 170 | |
John Stultz | ba7e8e5 | 2020-09-03 04:23:34 +0000 | [diff] [blame] | 171 | if (uses_legacy_ion_iface_ || ion_heap_name == "") { |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 172 | ret = MapNameToIonMask(heap_name, legacy_ion_heap_mask, legacy_ion_heap_flags); |
Hridya Valsaraju | ac059d8 | 2020-10-28 15:17:32 -0700 | [diff] [blame] | 173 | } else if (!ion_heap_name.empty()) { |
Hridya Valsaraju | 4e5cebe | 2020-05-25 21:50:40 -0700 | [diff] [blame] | 174 | ret = MapNameToIonName(heap_name, ion_heap_name, ion_heap_flags); |
| 175 | } |
| 176 | |
| 177 | return ret; |
| 178 | } |
Hridya Valsaraju | ff5134a | 2020-06-11 14:23:31 -0700 | [diff] [blame] | 179 | |
| 180 | int BufferAllocator::GetIonConfig(const std::string& heap_name, IonHeapConfig& heap_config) { |
| 181 | int ret = 0; |
Hridya Valsaraju | cff13da | 2021-03-12 17:01:01 -0800 | [diff] [blame] | 182 | |
| 183 | std::shared_lock<std::shared_mutex> slock(heap_name_to_config_mutex_); |
| 184 | |
Hridya Valsaraju | ff5134a | 2020-06-11 14:23:31 -0700 | [diff] [blame] | 185 | auto it = heap_name_to_config_.find(heap_name); |
| 186 | if (it != heap_name_to_config_.end()) { |
| 187 | heap_config = it->second; |
Hridya Valsaraju | cff13da | 2021-03-12 17:01:01 -0800 | [diff] [blame] | 188 | return ret; |
| 189 | } |
| 190 | |
| 191 | slock.unlock(); |
| 192 | |
| 193 | if (uses_legacy_ion_iface_) { |
| 194 | ret = -EINVAL; |
Hridya Valsaraju | ff5134a | 2020-06-11 14:23:31 -0700 | [diff] [blame] | 195 | } else { |
Hridya Valsaraju | cff13da | 2021-03-12 17:01:01 -0800 | [diff] [blame] | 196 | unsigned int heap_id; |
| 197 | ret = GetIonHeapIdByName(heap_name, &heap_id); |
| 198 | if (ret == 0) { |
| 199 | heap_config.mask = 1 << heap_id; |
| 200 | heap_config.flags = 0; |
| 201 | /* save it so that this lookup does not need to happen again */ |
| 202 | std::unique_lock<std::shared_mutex> ulock(heap_name_to_config_mutex_); |
| 203 | heap_name_to_config_[heap_name] = heap_config; |
Hridya Valsaraju | ff5134a | 2020-06-11 14:23:31 -0700 | [diff] [blame] | 204 | } |
| 205 | } |
| 206 | |
| 207 | if (ret) |
| 208 | LOG(ERROR) << "No ion heap of name " << heap_name << " exists"; |
| 209 | return ret; |
| 210 | } |
| 211 | |
Hridya Valsaraju | b07fb97 | 2020-06-08 22:42:54 -0700 | [diff] [blame] | 212 | int BufferAllocator::DmabufAlloc(const std::string& heap_name, size_t len) { |
| 213 | int fd = OpenDmabufHeap(heap_name); |
Hridya Valsaraju | cff13da | 2021-03-12 17:01:01 -0800 | [diff] [blame] | 214 | if (fd < 0) return fd; |
Hridya Valsaraju | b07fb97 | 2020-06-08 22:42:54 -0700 | [diff] [blame] | 215 | |
| 216 | struct dma_heap_allocation_data heap_data{ |
| 217 | .len = len, // length of data to be allocated in bytes |
| 218 | .fd_flags = O_RDWR | O_CLOEXEC, // permissions for the memory to be allocated |
| 219 | }; |
| 220 | |
| 221 | auto ret = TEMP_FAILURE_RETRY(ioctl(fd, DMA_HEAP_IOCTL_ALLOC, &heap_data)); |
Hridya Valsaraju | ac059d8 | 2020-10-28 15:17:32 -0700 | [diff] [blame] | 222 | if (ret < 0) { |
Hridya Valsaraju | cff13da | 2021-03-12 17:01:01 -0800 | [diff] [blame] | 223 | PLOG(ERROR) << "Unable to allocate from DMA-BUF heap: " << heap_name; |
Hridya Valsaraju | b07fb97 | 2020-06-08 22:42:54 -0700 | [diff] [blame] | 224 | return ret; |
Hridya Valsaraju | ac059d8 | 2020-10-28 15:17:32 -0700 | [diff] [blame] | 225 | } |
Hridya Valsaraju | b07fb97 | 2020-06-08 22:42:54 -0700 | [diff] [blame] | 226 | |
| 227 | return heap_data.fd; |
| 228 | } |
| 229 | |
Hridya Valsaraju | ac059d8 | 2020-10-28 15:17:32 -0700 | [diff] [blame] | 230 | int BufferAllocator::IonAlloc(const std::string& heap_name, size_t len, |
| 231 | unsigned int heap_flags, size_t legacy_align) { |
Hridya Valsaraju | ff5134a | 2020-06-11 14:23:31 -0700 | [diff] [blame] | 232 | IonHeapConfig heap_config; |
| 233 | auto ret = GetIonConfig(heap_name, heap_config); |
| 234 | if (ret) |
| 235 | return ret; |
| 236 | |
| 237 | int alloc_fd = -1; |
| 238 | unsigned int flags = heap_config.flags | heap_flags; |
John Stultz | fb27952 | 2020-09-03 04:58:29 +0000 | [diff] [blame] | 239 | ret = ion_alloc_fd(ion_fd_, len, legacy_align, heap_config.mask, flags, &alloc_fd); |
Hridya Valsaraju | ff5134a | 2020-06-11 14:23:31 -0700 | [diff] [blame] | 240 | if (ret) { |
| 241 | PLOG(ERROR) << "allocation fails for ion heap with mask: " << heap_config.mask |
| 242 | << " and flags: " << flags; |
| 243 | return ret; |
| 244 | } |
| 245 | return alloc_fd; |
| 246 | } |
Hridya Valsaraju | b07fb97 | 2020-06-08 22:42:54 -0700 | [diff] [blame] | 247 | |
Hridya Valsaraju | ac059d8 | 2020-10-28 15:17:32 -0700 | [diff] [blame] | 248 | int BufferAllocator::Alloc(const std::string& heap_name, size_t len, |
| 249 | unsigned int heap_flags, size_t legacy_align) { |
| 250 | int fd = DmabufAlloc(heap_name, len); |
Hridya Valsaraju | b07fb97 | 2020-06-08 22:42:54 -0700 | [diff] [blame] | 251 | |
Hridya Valsaraju | ac059d8 | 2020-10-28 15:17:32 -0700 | [diff] [blame] | 252 | if (fd < 0) |
| 253 | fd = IonAlloc(heap_name, len, heap_flags, legacy_align); |
| 254 | |
| 255 | return fd; |
Hridya Valsaraju | b07fb97 | 2020-06-08 22:42:54 -0700 | [diff] [blame] | 256 | } |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 257 | |
/*
 * Allocates |len| bytes from the system heap. When the CPU does not need to
 * touch the buffer, the "system-uncached" heap is preferred (DMA-BUF first,
 * then ION); otherwise, or when uncached is unsupported, the regular cached
 * system heap is used.
 *
 * NOTE(review): the two function-local statics below cache the support probes
 * process-wide using whichever BufferAllocator instance calls first — this
 * looks intentional (heap availability is a system-wide property), but confirm
 * it holds if multiple differently-configured allocators ever exist.
 */
int BufferAllocator::AllocSystem(bool cpu_access_needed, size_t len, unsigned int heap_flags,
                                 size_t legacy_align) {
    if (!cpu_access_needed) {
        /*
         * CPU does not need to access allocated buffer so we try to allocate in
         * the 'system-uncached' heap after querying for its existence.
         */
        static bool uncached_dmabuf_system_heap_support = [this]() -> bool {
            auto dmabuf_heap_list = this->GetDmabufHeapList();
            return (dmabuf_heap_list.find(kDmabufSystemUncachedHeapName) != dmabuf_heap_list.end());
        }();

        if (uncached_dmabuf_system_heap_support)
            return DmabufAlloc(kDmabufSystemUncachedHeapName, len);

        /* No uncached DMA-BUF heap; see if ION knows the uncached system heap. */
        static bool uncached_ion_system_heap_support = [this]() -> bool {
            IonHeapConfig heap_config;
            auto ret = this->GetIonConfig(kDmabufSystemUncachedHeapName, heap_config);
            return (ret == 0);
        }();

        if (uncached_ion_system_heap_support)
            return IonAlloc(kDmabufSystemUncachedHeapName, len, heap_flags, legacy_align);
    }

    /*
     * Either 1) CPU needs to access allocated buffer OR 2) CPU does not need to
     * access allocated buffer but the "system-uncached" heap is unsupported.
     */
    return Alloc(kDmabufSystemHeapName, len, heap_flags, legacy_align);
}
| 289 | |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 290 | int BufferAllocator::LegacyIonCpuSync(unsigned int dmabuf_fd, |
Chris Goldsworthy | ec5411c | 2020-08-14 10:14:59 -0700 | [diff] [blame] | 291 | const CustomCpuSyncLegacyIon& legacy_ion_cpu_sync_custom, |
| 292 | void *legacy_ion_custom_data) { |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 293 | if (!legacy_ion_cpu_sync_custom) |
| 294 | return ion_sync_fd(ion_fd_, dmabuf_fd); |
| 295 | |
| 296 | // dup ion_fd_ so that we retain its ownership. |
| 297 | int new_ion_fd = TEMP_FAILURE_RETRY(dup(ion_fd_.get())); |
| 298 | if (new_ion_fd < 0) { |
| 299 | PLOG(ERROR) << "Unable to dup ion fd. error: " << new_ion_fd; |
| 300 | return new_ion_fd; |
| 301 | } |
| 302 | |
Chris Goldsworthy | ec5411c | 2020-08-14 10:14:59 -0700 | [diff] [blame] | 303 | int ret = legacy_ion_cpu_sync_custom(new_ion_fd, dmabuf_fd, legacy_ion_custom_data); |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 304 | |
| 305 | close(new_ion_fd); |
| 306 | return ret; |
| 307 | } |
| 308 | |
| 309 | int BufferAllocator::DoSync(unsigned int dmabuf_fd, bool start, SyncType sync_type, |
Chris Goldsworthy | ec5411c | 2020-08-14 10:14:59 -0700 | [diff] [blame] | 310 | const CustomCpuSyncLegacyIon& legacy_ion_cpu_sync_custom, |
| 311 | void *legacy_ion_custom_data) { |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 312 | if (uses_legacy_ion_iface_) { |
Chris Goldsworthy | ec5411c | 2020-08-14 10:14:59 -0700 | [diff] [blame] | 313 | return LegacyIonCpuSync(dmabuf_fd, legacy_ion_cpu_sync_custom, |
| 314 | legacy_ion_custom_data); |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 315 | } |
| 316 | |
| 317 | struct dma_buf_sync sync = { |
| 318 | .flags = (start ? DMA_BUF_SYNC_START : DMA_BUF_SYNC_END) | |
| 319 | static_cast<uint64_t>(sync_type), |
| 320 | }; |
| 321 | return TEMP_FAILURE_RETRY(ioctl(dmabuf_fd, DMA_BUF_IOCTL_SYNC, &sync)); |
| 322 | } |
| 323 | |
| 324 | int BufferAllocator::CpuSyncStart(unsigned int dmabuf_fd, SyncType sync_type, |
Chris Goldsworthy | ec5411c | 2020-08-14 10:14:59 -0700 | [diff] [blame] | 325 | const CustomCpuSyncLegacyIon& legacy_ion_cpu_sync_custom, |
| 326 | void *legacy_ion_custom_data) { |
Chris Goldsworthy | ec5411c | 2020-08-14 10:14:59 -0700 | [diff] [blame] | 327 | int ret = DoSync(dmabuf_fd, true /* start */, sync_type, legacy_ion_cpu_sync_custom, |
| 328 | legacy_ion_custom_data); |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 329 | |
Hridya Valsaraju | a83c24e | 2021-03-13 15:30:16 -0800 | [diff] [blame] | 330 | if (ret) PLOG(ERROR) << "CpuSyncStart() failure"; |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 331 | return ret; |
| 332 | } |
| 333 | |
Hridya Valsaraju | a83c24e | 2021-03-13 15:30:16 -0800 | [diff] [blame] | 334 | int BufferAllocator::CpuSyncEnd(unsigned int dmabuf_fd, SyncType sync_type, |
Chris Goldsworthy | ec5411c | 2020-08-14 10:14:59 -0700 | [diff] [blame] | 335 | const CustomCpuSyncLegacyIon& legacy_ion_cpu_sync_custom, |
Hridya Valsaraju | a83c24e | 2021-03-13 15:30:16 -0800 | [diff] [blame] | 336 | void* legacy_ion_custom_data) { |
| 337 | int ret = DoSync(dmabuf_fd, false /* start */, sync_type, legacy_ion_cpu_sync_custom, |
| 338 | legacy_ion_custom_data); |
| 339 | if (ret) PLOG(ERROR) << "CpuSyncEnd() failure"; |
Hridya Valsaraju | a53d7f2 | 2020-06-12 11:23:12 -0700 | [diff] [blame] | 340 | |
| 341 | return ret; |
| 342 | } |
Hridya Valsaraju | ac7673b | 2021-02-02 11:05:27 -0800 | [diff] [blame] | 343 | |
| 344 | std::unordered_set<std::string> BufferAllocator::GetDmabufHeapList() { |
| 345 | std::unordered_set<std::string> heap_list; |
| 346 | std::unique_ptr<DIR, int (*)(DIR*)> dir(opendir(kDmaHeapRoot), closedir); |
| 347 | |
| 348 | if (dir) { |
| 349 | struct dirent* dent; |
| 350 | while ((dent = readdir(dir.get()))) { |
| 351 | if (!strcmp(dent->d_name, ".") || !strcmp(dent->d_name, "..")) continue; |
| 352 | |
| 353 | heap_list.insert(dent->d_name); |
| 354 | } |
| 355 | } |
| 356 | |
| 357 | return heap_list; |
| 358 | } |
Hridya Valsaraju | 872252b | 2021-03-24 10:43:21 -0700 | [diff] [blame] | 359 | |
| 360 | bool BufferAllocator::CheckIonSupport() { |
| 361 | static bool ion_support = (access(kIonDevice, R_OK) == 0); |
| 362 | |
| 363 | return ion_support; |
| 364 | } |