/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "EvsV4lCamera.h"
#include "EvsEnumerator.h"
#include "bufferCopy.h"

#include <sys/types.h>
#include <sys/stat.h>

#include <android/hardware_buffer.h>
#include <android-base/logging.h>
#include <android-base/unique_fd.h>
#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>
#include <utils/SystemClock.h>

namespace {

// The size of a pixel of RGBA format data in bytes
constexpr auto kBytesPerPixelRGBA = 4;

// Default camera output image resolution
const std::array<int32_t, 2> kDefaultResolution = {640, 480};

// Arbitrary limit on number of graphics buffers allowed to be allocated
// Safeguards against unreasonable resource consumption and provides a testable limit
static const unsigned MAX_BUFFERS_IN_FLIGHT = 100;

}  // anonymous namespace


namespace android {
namespace hardware {
namespace automotive {
namespace evs {
namespace V1_1 {
namespace implementation {

EvsV4lCamera::EvsV4lCamera(const char *deviceName,
                           unique_ptr<ConfigManager::CameraInfo> &camInfo) :
        mFramesAllowed(0),
        mFramesInUse(0),
        mCameraInfo(camInfo) {
    LOG(DEBUG) << "EvsV4lCamera instantiated";

    mDescription.v1.cameraId = deviceName;
    if (camInfo != nullptr) {
        mDescription.metadata.setToExternal((uint8_t *)camInfo->characteristics,
                                            get_camera_metadata_size(camInfo->characteristics));
    }

    // Default output buffer format.
    mFormat = HAL_PIXEL_FORMAT_RGBA_8888;

    // How we expect to use the gralloc buffers we'll exchange with our client
    mUsage = GRALLOC_USAGE_HW_TEXTURE |
             GRALLOC_USAGE_SW_READ_RARELY |
             GRALLOC_USAGE_SW_WRITE_OFTEN;
}


EvsV4lCamera::~EvsV4lCamera() {
    LOG(DEBUG) << "EvsV4lCamera being destroyed";
    shutdown();
}


//
// This gets called if another caller "steals" ownership of the camera
//
void EvsV4lCamera::shutdown()
{
    LOG(DEBUG) << "EvsV4lCamera shutdown";

    // Make sure our output stream is cleaned up
    // (It really should be already)
    stopVideoStream();

    // Note: Since stopVideoStream is blocking, no other threads can now be running

    // Close our video capture device
    mVideo.close();

    // Drop all the graphics buffers we've been using
    if (mBuffers.size() > 0) {
        GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());
        for (auto&& rec : mBuffers) {
            if (rec.inUse) {
                LOG(WARNING) << "Releasing buffer despite remote ownership";
            }
            alloc.free(rec.handle);
            rec.handle = nullptr;
        }
        mBuffers.clear();
    }
}


// Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
Return<void> EvsV4lCamera::getCameraInfo(getCameraInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // Send back our self description
    _hidl_cb(mDescription.v1);
    return Void();
}


Return<EvsResult> EvsV4lCamera::setMaxFramesInFlight(uint32_t bufferCount) {
    LOG(DEBUG) << __FUNCTION__;
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring setMaxFramesInFlight call when camera has been lost.";
        return EvsResult::OWNERSHIP_LOST;
    }

    // We cannot function without at least one video buffer to send data
    if (bufferCount < 1) {
        LOG(ERROR) << "Ignoring setMaxFramesInFlight with less than one buffer requested";
        return EvsResult::INVALID_ARG;
    }

    // Update our internal state
    if (setAvailableFrames_Locked(bufferCount)) {
        return EvsResult::OK;
    } else {
        return EvsResult::BUFFER_NOT_AVAILABLE;
    }
}


Return<EvsResult> EvsV4lCamera::startVideoStream(const sp<IEvsCameraStream_1_0>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring startVideoStream call when camera has been lost.";
        return EvsResult::OWNERSHIP_LOST;
    }
    if (mStream.get() != nullptr) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
        return EvsResult::STREAM_ALREADY_RUNNING;
    }

    // If the client never indicated otherwise, configure ourselves for a single streaming buffer
    if (mFramesAllowed < 1) {
        if (!setAvailableFrames_Locked(1)) {
            LOG(ERROR) << "Failed to start stream because we couldn't get a graphics buffer";
            return EvsResult::BUFFER_NOT_AVAILABLE;
        }
    }

    // Choose which image transfer function we need
    // Map from V4L2 to Android graphic buffer format
    const uint32_t videoSrcFormat = mVideo.getV4LFormat();
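    // The V4L2 source format is a FourCC code packed into a uint32_t, which is why the logging
    // below prints it by reinterpreting its bytes as characters.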
    LOG(INFO) << "Configuring to accept " << (char*)&videoSrcFormat
              << " camera data and convert to " << std::hex << mFormat;

    switch (mFormat) {
    case HAL_PIXEL_FORMAT_YCRCB_420_SP:
        switch (videoSrcFormat) {
        case V4L2_PIX_FMT_NV21: mFillBufferFromVideo = fillNV21FromNV21; break;
        case V4L2_PIX_FMT_YUYV: mFillBufferFromVideo = fillNV21FromYUYV; break;
        default:
            LOG(ERROR) << "Unhandled camera output format: "
                       << ((char*)&videoSrcFormat)[0]
                       << ((char*)&videoSrcFormat)[1]
                       << ((char*)&videoSrcFormat)[2]
                       << ((char*)&videoSrcFormat)[3]
                       << std::hex << videoSrcFormat;
        }
        break;
    case HAL_PIXEL_FORMAT_RGBA_8888:
        switch (videoSrcFormat) {
        case V4L2_PIX_FMT_YUYV: mFillBufferFromVideo = fillRGBAFromYUYV; break;
        default:
            LOG(ERROR) << "Unhandled camera format " << (char*)&videoSrcFormat;
        }
        break;
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
        switch (videoSrcFormat) {
        case V4L2_PIX_FMT_YUYV: mFillBufferFromVideo = fillYUYVFromYUYV; break;
        case V4L2_PIX_FMT_UYVY: mFillBufferFromVideo = fillYUYVFromUYVY; break;
        default:
            LOG(ERROR) << "Unhandled camera format " << (char*)&videoSrcFormat;
        }
        break;
    default:
        LOG(ERROR) << "Unhandled camera format " << (char*)&mFormat;
    }


    // Record the user's callback for use when we have a frame ready
    mStream = stream;
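    // If the client also implements the V1.1 stream interface, remember it; castFrom() falls back
    // to nullptr for a V1.0-only client, and frame delivery later picks the matching path.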
    mStream_1_1 = IEvsCameraStream_1_1::castFrom(mStream).withDefault(nullptr);

    // Set up the video stream with a callback to our member function forwardFrame()
    if (!mVideo.startStream([this](VideoCapture*, imageBuffer* tgt, void* data) {
                                this->forwardFrame(tgt, data);
                            })
       ) {
        // No need to hold onto this if we failed to start
        mStream = nullptr;
        mStream_1_1 = nullptr;
        LOG(ERROR) << "Underlying camera start stream failed";
        return EvsResult::UNDERLYING_SERVICE_ERROR;
    }

    return EvsResult::OK;
}


Return<void> EvsV4lCamera::doneWithFrame(const BufferDesc_1_0& buffer) {
    LOG(DEBUG) << __FUNCTION__;
    doneWithFrame_impl(buffer.bufferId, buffer.memHandle);

    return Void();
}


Return<void> EvsV4lCamera::stopVideoStream() {
    LOG(DEBUG) << __FUNCTION__;

    // Tell the capture device to stop (and block until it does)
    mVideo.stopStream();

    if (mStream_1_1 != nullptr) {
        // V1.1 client is waiting on STREAM_STOPPED event.
        std::unique_lock<std::mutex> lock(mAccessLock);

        EvsEventDesc event;
        event.aType = EvsEventType::STREAM_STOPPED;
        auto result = mStream_1_1->notify(event);
        if (!result.isOk()) {
            LOG(ERROR) << "Error delivering end of stream event";
        }

        // Drop our reference to the client's stream receiver
        mStream_1_1 = nullptr;
        mStream = nullptr;
    } else if (mStream != nullptr) {
        std::unique_lock<std::mutex> lock(mAccessLock);

        // Send one last NULL frame to signal the actual end of stream
        BufferDesc_1_0 nullBuff = {};
        auto result = mStream->deliverFrame(nullBuff);
        if (!result.isOk()) {
            LOG(ERROR) << "Error delivering end of stream marker";
        }

        // Drop our reference to the client's stream receiver
        mStream = nullptr;
    }

    return Void();
}


Return<int32_t> EvsV4lCamera::getExtendedInfo(uint32_t /*opaqueIdentifier*/) {
    LOG(DEBUG) << __FUNCTION__;
    // Return zero by default as required by the spec
    return 0;
}


Return<EvsResult> EvsV4lCamera::setExtendedInfo(uint32_t /*opaqueIdentifier*/,
                                                int32_t /*opaqueValue*/) {
    LOG(DEBUG) << __FUNCTION__;
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring setExtendedInfo call when camera has been lost.";
        return EvsResult::OWNERSHIP_LOST;
    }

    // We don't store any device specific information in this implementation
    return EvsResult::INVALID_ARG;
}


// Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
Return<void> EvsV4lCamera::getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // Send back our self description
    _hidl_cb(mDescription);
    return Void();
}


Return<void> EvsV4lCamera::getPhysicalCameraInfo(const hidl_string& id,
                                                 getPhysicalCameraInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // This method works exactly the same as getCameraInfo_1_1() in the EVS HW module.
    (void)id;
    _hidl_cb(mDescription);
    return Void();
}


Return<EvsResult> EvsV4lCamera::doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
    LOG(DEBUG) << __FUNCTION__;

    for (auto&& buffer : buffers) {
        doneWithFrame_impl(buffer.bufferId, buffer.buffer.nativeHandle);
    }

    return EvsResult::OK;
}


Return<EvsResult> EvsV4lCamera::pauseVideoStream() {
    return EvsResult::UNDERLYING_SERVICE_ERROR;
}


Return<EvsResult> EvsV4lCamera::resumeVideoStream() {
    return EvsResult::UNDERLYING_SERVICE_ERROR;
}


Return<EvsResult> EvsV4lCamera::setMaster() {
    /* Because the EVS HW module reference implementation expects a single client at
     * a time, this always returns a success code.
     */
    return EvsResult::OK;
}


Return<EvsResult> EvsV4lCamera::forceMaster(const sp<IEvsDisplay_1_0>&) {
    /* Because the EVS HW module reference implementation expects a single client at
     * a time, this always returns a success code.
     */
    return EvsResult::OK;
}


Return<EvsResult> EvsV4lCamera::unsetMaster() {
    /* Because the EVS HW module reference implementation expects a single client at
     * a time, there is no chance that this is called by the secondary client, and
     * it therefore always returns a success code.
     */
    return EvsResult::OK;
}


Return<void> EvsV4lCamera::getParameterList(getParameterList_cb _hidl_cb) {
    hidl_vec<CameraParam> hidlCtrls;
    if (mCameraInfo != nullptr) {
        hidlCtrls.resize(mCameraInfo->controls.size());
        unsigned idx = 0;
        for (auto& [cid, range] : mCameraInfo->controls) {
            hidlCtrls[idx++] = cid;
        }
    }

    _hidl_cb(hidlCtrls);
    return Void();
}


Return<void> EvsV4lCamera::getIntParameterRange(CameraParam id,
                                                getIntParameterRange_cb _hidl_cb) {
    if (mCameraInfo != nullptr) {
        auto range = mCameraInfo->controls[id];
        _hidl_cb(get<0>(range), get<1>(range), get<2>(range));
    } else {
        _hidl_cb(0, 0, 0);
    }

    return Void();
}


Return<void> EvsV4lCamera::setIntParameter(CameraParam id, int32_t value,
                                           setIntParameter_cb _hidl_cb) {
    uint32_t v4l2cid = V4L2_CID_BASE;
    hidl_vec<int32_t> values;
    values.resize(1);
    if (!convertToV4l2CID(id, v4l2cid)) {
        _hidl_cb(EvsResult::INVALID_ARG, values);
    } else {
        EvsResult result = EvsResult::OK;
        v4l2_control control = {v4l2cid, value};
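        // Apply the new value, then read the control back so the client receives the value the
        // driver actually accepted (drivers may clamp or quantize the requested setting).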
        if (mVideo.setParameter(control) < 0 ||
            mVideo.getParameter(control) < 0) {
            result = EvsResult::UNDERLYING_SERVICE_ERROR;
        }

        values[0] = control.value;
        _hidl_cb(result, values);
    }

    return Void();
}


Return<void> EvsV4lCamera::getIntParameter(CameraParam id,
                                           getIntParameter_cb _hidl_cb) {
    uint32_t v4l2cid = V4L2_CID_BASE;
    hidl_vec<int32_t> values;
    values.resize(1);
    if (!convertToV4l2CID(id, v4l2cid)) {
        _hidl_cb(EvsResult::INVALID_ARG, values);
    } else {
        EvsResult result = EvsResult::OK;
        v4l2_control control = {v4l2cid, 0};
        if (mVideo.getParameter(control) < 0) {
            result = EvsResult::INVALID_ARG;
        }

        // Report a result
        values[0] = control.value;
        _hidl_cb(result, values);
    }

    return Void();
}


Return<EvsResult> EvsV4lCamera::setExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                                    const hidl_vec<uint8_t>& opaqueValue) {
    mExtInfo.insert_or_assign(opaqueIdentifier, opaqueValue);
    return EvsResult::OK;
}


Return<void> EvsV4lCamera::getExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                               getExtendedInfo_1_1_cb _hidl_cb) {
    const auto it = mExtInfo.find(opaqueIdentifier);
    hidl_vec<uint8_t> value;
    auto status = EvsResult::OK;
    if (it == mExtInfo.end()) {
        status = EvsResult::INVALID_ARG;
    } else {
        value = mExtInfo[opaqueIdentifier];
    }

    _hidl_cb(status, value);
    return Void();
}


Return<void>
EvsV4lCamera::importExternalBuffers(const hidl_vec<BufferDesc_1_1>& buffers,
                                    importExternalBuffers_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring a request to add external buffers "
                     << "when the camera has been lost.";
        _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, mFramesAllowed);
        return {};
    }

    auto numBuffersToAdd = buffers.size();
    if (numBuffersToAdd < 1) {
        LOG(DEBUG) << "No buffers to add.";
        _hidl_cb(EvsResult::OK, mFramesAllowed);
        return {};
    }

    {
        std::scoped_lock<std::mutex> lock(mAccessLock);

        if (numBuffersToAdd > (MAX_BUFFERS_IN_FLIGHT - mFramesAllowed)) {
            // Clamp the request so the total stays within the internal buffer limit
            numBuffersToAdd = MAX_BUFFERS_IN_FLIGHT - mFramesAllowed;
            LOG(WARNING) << "Exceeded the limit on the number of buffers; only "
                         << numBuffersToAdd << " buffers will be added.";
        }

        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
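        // Remember the current count so we can report how many buffers were actually imported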
        const auto before = mFramesAllowed;
        for (size_t i = 0; i < numBuffersToAdd; ++i) {
            // TODO: reject if external buffer is configured differently.
            auto& b = buffers[i];
            const AHardwareBuffer_Desc* pDesc =
                reinterpret_cast<const AHardwareBuffer_Desc *>(&b.buffer.description);

            // Import a buffer to add
            buffer_handle_t memHandle = nullptr;
            status_t result = mapper.importBuffer(b.buffer.nativeHandle,
                                                  pDesc->width,
                                                  pDesc->height,
                                                  1,
                                                  pDesc->format,
                                                  pDesc->usage,
                                                  pDesc->stride,
                                                  &memHandle);
            if (result != android::NO_ERROR || !memHandle) {
                LOG(WARNING) << "Failed to import a buffer " << b.bufferId;
                continue;
            }

            auto stored = false;
            for (auto&& rec : mBuffers) {
                if (rec.handle == nullptr) {
                    // Use this existing entry
                    rec.handle = memHandle;
                    rec.inUse = false;

                    stored = true;
                    break;
                }
            }

            if (!stored) {
                // Add a BufferRecord wrapping this handle to our set of available buffers
                mBuffers.emplace_back(memHandle);
            }

            ++mFramesAllowed;
        }

        _hidl_cb(EvsResult::OK, mFramesAllowed - before);
        return {};
    }
}


EvsResult EvsV4lCamera::doneWithFrame_impl(const uint32_t bufferId,
                                           const buffer_handle_t memHandle) {
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        LOG(WARNING) << "Ignoring doneWithFrame call when camera has been lost.";
    } else {
        if (memHandle == nullptr) {
            LOG(ERROR) << "Ignoring doneWithFrame called with null handle";
        } else if (bufferId >= mBuffers.size()) {
            LOG(ERROR) << "Ignoring doneWithFrame called with invalid bufferId " << bufferId
                       << " (max is " << mBuffers.size() - 1 << ")";
        } else if (!mBuffers[bufferId].inUse) {
            LOG(ERROR) << "Ignoring doneWithFrame called on frame " << bufferId
                       << " which is already free";
        } else {
            // Mark the frame as available
            mBuffers[bufferId].inUse = false;
            mFramesInUse--;

            // If this frame's index is high in the array, try to move it down
            // to improve locality after mFramesAllowed has been reduced.
            if (bufferId >= mFramesAllowed) {
                // Find an empty slot lower in the array (which should always exist in this case)
                for (auto&& rec : mBuffers) {
                    if (rec.handle == nullptr) {
                        rec.handle = mBuffers[bufferId].handle;
                        mBuffers[bufferId].handle = nullptr;
                        break;
                    }
                }
            }
        }
    }

    return EvsResult::OK;
}


bool EvsV4lCamera::setAvailableFrames_Locked(unsigned bufferCount) {
    if (bufferCount < 1) {
        LOG(ERROR) << "Ignoring request to set buffer count to zero";
        return false;
    }
    if (bufferCount > MAX_BUFFERS_IN_FLIGHT) {
        LOG(ERROR) << "Rejecting buffer request in excess of internal limit";
        return false;
    }

    // Is an increase required?
    if (mFramesAllowed < bufferCount) {
        // An increase is required
        unsigned needed = bufferCount - mFramesAllowed;
        LOG(INFO) << "Allocating " << needed << " buffers for camera frames";

        unsigned added = increaseAvailableFrames_Locked(needed);
        if (added != needed) {
            // If we didn't add all the frames we needed, then roll back to the previous state
            LOG(ERROR) << "Rolling back to previous frame queue size";
            decreaseAvailableFrames_Locked(added);
            return false;
        }
    } else if (mFramesAllowed > bufferCount) {
        // A decrease is required
        unsigned framesToRelease = mFramesAllowed - bufferCount;
        LOG(INFO) << "Returning " << framesToRelease << " camera frame buffers";

        unsigned released = decreaseAvailableFrames_Locked(framesToRelease);
        if (released != framesToRelease) {
            // This shouldn't happen with a properly behaving client because the client
            // should only make this call after returning sufficient outstanding buffers
            // to allow a clean resize.
            LOG(ERROR) << "Buffer queue shrink failed -- too many buffers currently in use?";
        }
    }

    return true;
}


unsigned EvsV4lCamera::increaseAvailableFrames_Locked(unsigned numToAdd) {
    // Acquire the graphics buffer allocator
    GraphicBufferAllocator &alloc(GraphicBufferAllocator::get());

    unsigned added = 0;

    while (added < numToAdd) {
        unsigned pixelsPerLine;
        buffer_handle_t memHandle = nullptr;
        status_t result = alloc.allocate(mVideo.getWidth(), mVideo.getHeight(),
                                         mFormat, 1,
                                         mUsage,
                                         &memHandle, &pixelsPerLine, 0, "EvsV4lCamera");
        if (result != NO_ERROR) {
            LOG(ERROR) << "Error " << result << " allocating "
                       << mVideo.getWidth() << " x " << mVideo.getHeight()
                       << " graphics buffer";
            break;
        }
        if (!memHandle) {
            LOG(ERROR) << "We didn't get a buffer handle back from the allocator";
            break;
        }
        if (mStride) {
            if (mStride != pixelsPerLine) {
                LOG(ERROR) << "We did not expect to get buffers with different strides!";
            }
        } else {
            // Gralloc defines stride in terms of pixels per line
            mStride = pixelsPerLine;
        }

        // Find a place to store the new buffer
        bool stored = false;
        for (auto&& rec : mBuffers) {
            if (rec.handle == nullptr) {
                // Use this existing entry
                rec.handle = memHandle;
                rec.inUse = false;
                stored = true;
                break;
            }
        }
        if (!stored) {
            // Add a BufferRecord wrapping this handle to our set of available buffers
            mBuffers.emplace_back(memHandle);
        }

        mFramesAllowed++;
        added++;
    }

    return added;
}


unsigned EvsV4lCamera::decreaseAvailableFrames_Locked(unsigned numToRemove) {
    // Acquire the graphics buffer allocator
    GraphicBufferAllocator &alloc(GraphicBufferAllocator::get());

    unsigned removed = 0;

    for (auto&& rec : mBuffers) {
        // Is this record not in use, but holding a buffer that we can free?
        if ((rec.inUse == false) && (rec.handle != nullptr)) {
            // Release buffer and update the record so we can recognize it as "empty"
            alloc.free(rec.handle);
            rec.handle = nullptr;

            mFramesAllowed--;
            removed++;

            if (removed == numToRemove) {
                break;
            }
        }
    }

    return removed;
}


// This is the async callback from the video camera that tells us a frame is ready
void EvsV4lCamera::forwardFrame(imageBuffer* pV4lBuff, void* pData) {
    bool readyForFrame = false;
    size_t idx = 0;

    // Lock scope for updating shared state
    {
        std::lock_guard<std::mutex> lock(mAccessLock);

        // Are we allowed to issue another buffer?
        if (mFramesInUse >= mFramesAllowed) {
            // Can't do anything right now -- skip this frame
            LOG(WARNING) << "Skipped a frame because too many are in flight";
        } else {
            // Identify an available buffer to fill
            for (idx = 0; idx < mBuffers.size(); idx++) {
                if (!mBuffers[idx].inUse) {
                    if (mBuffers[idx].handle != nullptr) {
                        // Found an available record, so stop looking
                        break;
                    }
                }
            }
            if (idx >= mBuffers.size()) {
                // This shouldn't happen since we already checked mFramesInUse vs mFramesAllowed
                LOG(ERROR) << "Failed to find an available buffer slot";
            } else {
                // We're going to make the frame busy
                mBuffers[idx].inUse = true;
                mFramesInUse++;
                readyForFrame = true;
            }
        }
    }

    if (mDumpFrame) {
        // Construct a target filename with the device identifier
        std::string filename = std::string(mDescription.v1.cameraId);
        std::replace(filename.begin(), filename.end(), '/', '_');
        filename = mDumpPath + filename + "_" + std::to_string(mFrameCounter) + ".bin";

        android::base::unique_fd fd(open(filename.c_str(),
                                         O_WRONLY | O_CREAT,
                                         S_IRUSR | S_IWUSR | S_IRGRP));
        LOG(INFO) << filename << ", " << fd;
        if (fd == -1) {
            PLOG(ERROR) << "Failed to open a file, " << filename;
        } else {
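            // Dump layout: width, height, stride, and pixel format headers followed by the raw
            // captured frame bytes, so the file can be reinterpreted offline.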
            auto width = mVideo.getWidth();
            auto height = mVideo.getHeight();
            auto len = write(fd.get(), &width, sizeof(width));
            len += write(fd.get(), &height, sizeof(height));
            len += write(fd.get(), &mStride, sizeof(mStride));
            len += write(fd.get(), &mFormat, sizeof(mFormat));
            len += write(fd.get(), pData, pV4lBuff->length);
            LOG(INFO) << len << " bytes are written to " << filename;
        }
    }

    if (!readyForFrame) {
        // We need to return the video buffer so it can capture a new frame
        mVideo.markFrameConsumed(pV4lBuff->index);
    } else {
        // Assemble the buffer description we'll transmit below
        BufferDesc_1_1 bufDesc_1_1 = {};
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc *>(&bufDesc_1_1.buffer.description);
        pDesc->width = mVideo.getWidth();
        pDesc->height = mVideo.getHeight();
        pDesc->layers = 1;
        pDesc->format = mFormat;
        pDesc->usage = mUsage;
        pDesc->stride = mStride;
        bufDesc_1_1.buffer.nativeHandle = mBuffers[idx].handle;
        bufDesc_1_1.bufferId = idx;
        bufDesc_1_1.deviceId = mDescription.v1.cameraId;
        // timestamp in microseconds.
        bufDesc_1_1.timestamp =
            pV4lBuff->timestamp.tv_sec * 1e+6 + pV4lBuff->timestamp.tv_usec;

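        // Make sure the scratch buffer used for pixel format conversion can hold this frame in RGBA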
        const auto sizeInRGBA = pDesc->width * pDesc->height * kBytesPerPixelRGBA;
        if (mColorSpaceConversionBuffer.size() < sizeInRGBA) {
            mColorSpaceConversionBuffer.resize(sizeInRGBA);
        }

        // Lock our output buffer for writing
        // TODO(b/145459970): Sometimes, physical camera device maps a buffer
        // into the address that is about to be unmapped by another device; this
        // causes SEGV_MAPPER.
        void *targetPixels = nullptr;
        GraphicBufferMapper &mapper = GraphicBufferMapper::get();
        status_t result =
            mapper.lock(bufDesc_1_1.buffer.nativeHandle,
                        GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_NEVER,
                        android::Rect(pDesc->width, pDesc->height),
                        (void **)&targetPixels);

        // If we failed to lock the pixel buffer, we're about to crash, but log it first
        if (!targetPixels) {
            // TODO(b/145457727): When EvsHidlTest::CameraToDisplayRoundTrip
            // test case was repeatedly executed, EVS occasionally fails to map
            // a buffer.
            LOG(ERROR) << "Camera failed to gain access to image buffer for writing - "
                       << " status: " << statusToString(result)
                       << " , error: " << strerror(errno);
        }

        // Transfer the video image into the output buffer, making any needed
        // format conversion along the way
        mFillBufferFromVideo(bufDesc_1_1, (uint8_t *)targetPixels, pData,
                             mColorSpaceConversionBuffer.data(), mStride);

        // Unlock the output buffer
        mapper.unlock(bufDesc_1_1.buffer.nativeHandle);

        // Give the video frame back to the underlying device for reuse
        // Note that we do this before making the client callback to give the
        // underlying camera more time to capture the next frame
        mVideo.markFrameConsumed(pV4lBuff->index);

        // Issue the (asynchronous) callback to the client -- can't be holding
        // the lock
        bool flag = false;
        if (mStream_1_1 != nullptr) {
            hidl_vec<BufferDesc_1_1> frames;
            frames.resize(1);
            frames[0] = bufDesc_1_1;
            auto result = mStream_1_1->deliverFrame_1_1(frames);
            flag = result.isOk();
        } else {
            BufferDesc_1_0 bufDesc_1_0 = {
                pDesc->width,
                pDesc->height,
                pDesc->stride,
                bufDesc_1_1.pixelSize,
                static_cast<uint32_t>(pDesc->format),
                static_cast<uint32_t>(pDesc->usage),
                bufDesc_1_1.bufferId,
                bufDesc_1_1.buffer.nativeHandle
            };

            auto result = mStream->deliverFrame(bufDesc_1_0);
            flag = result.isOk();
        }

        if (flag) {
            LOG(DEBUG) << "Delivered " << bufDesc_1_1.buffer.nativeHandle.getNativeHandle()
                       << " as id " << bufDesc_1_1.bufferId;
        } else {
            // This can happen if the client dies and is likely unrecoverable.
            // To avoid consuming resources generating failing calls, we stop sending
            // frames. Note, however, that the stream remains in the "STREAMING" state
            // until cleaned up on the main thread.
            LOG(ERROR) << "Frame delivery call failed in the transport layer.";

            // Since we didn't actually deliver it, mark the frame as available
            std::lock_guard<std::mutex> lock(mAccessLock);
            mBuffers[idx].inUse = false;

            mFramesInUse--;
        }
    }

    // Increase the frame counter
    ++mFrameCounter;
}


bool EvsV4lCamera::convertToV4l2CID(CameraParam id, uint32_t& v4l2cid) {
    switch (id) {
    case CameraParam::BRIGHTNESS:
        v4l2cid = V4L2_CID_BRIGHTNESS;
        break;
    case CameraParam::CONTRAST:
        v4l2cid = V4L2_CID_CONTRAST;
        break;
    case CameraParam::AUTO_WHITE_BALANCE:
        v4l2cid = V4L2_CID_AUTO_WHITE_BALANCE;
        break;
    case CameraParam::WHITE_BALANCE_TEMPERATURE:
        v4l2cid = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
        break;
    case CameraParam::SHARPNESS:
        v4l2cid = V4L2_CID_SHARPNESS;
        break;
    case CameraParam::AUTO_EXPOSURE:
        v4l2cid = V4L2_CID_EXPOSURE_AUTO;
        break;
    case CameraParam::ABSOLUTE_EXPOSURE:
        v4l2cid = V4L2_CID_EXPOSURE_ABSOLUTE;
        break;
    case CameraParam::AUTO_FOCUS:
        v4l2cid = V4L2_CID_FOCUS_AUTO;
        break;
    case CameraParam::ABSOLUTE_FOCUS:
        v4l2cid = V4L2_CID_FOCUS_ABSOLUTE;
        break;
    case CameraParam::ABSOLUTE_ZOOM:
        v4l2cid = V4L2_CID_ZOOM_ABSOLUTE;
        break;
    default:
        LOG(ERROR) << "Camera parameter " << static_cast<unsigned>(id) << " is unknown.";
        return false;
    }

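    // The CID maps to a known control; report whether this particular device actually exposes it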
    return mCameraControls.find(v4l2cid) != mCameraControls.end();
}


sp<EvsV4lCamera> EvsV4lCamera::Create(const char *deviceName) {
    unique_ptr<ConfigManager::CameraInfo> nullCamInfo = nullptr;

    return Create(deviceName, nullCamInfo);
}


sp<EvsV4lCamera> EvsV4lCamera::Create(const char *deviceName,
                                      unique_ptr<ConfigManager::CameraInfo> &camInfo,
                                      const Stream *requestedStreamCfg) {
    LOG(INFO) << "Create " << deviceName;
    sp<EvsV4lCamera> evsCamera = new EvsV4lCamera(deviceName, camInfo);
    if (evsCamera == nullptr) {
        return nullptr;
    }

    // Initialize the video device
    bool success = false;
    if (camInfo != nullptr && requestedStreamCfg != nullptr) {
        // Validate a given stream configuration. If there is no exact match,
        // this will try to find the best match based on:
        // 1) the same output format, and
        // 2) the largest resolution that is smaller than the given configuration.
        int32_t streamId = -1, area = INT_MIN;
        for (auto& [id, cfg] : camInfo->streamConfigurations) {
            // RawConfiguration has id, width, height, format, direction, and
            // fps.
            if (cfg[3] == static_cast<uint32_t>(requestedStreamCfg->format)) {
                if (cfg[1] == requestedStreamCfg->width &&
                    cfg[2] == requestedStreamCfg->height) {
                    // Find exact match.
                    streamId = id;
                    break;
                } else if (requestedStreamCfg->width > cfg[1] &&
                           requestedStreamCfg->height > cfg[2] &&
                           cfg[1] * cfg[2] > area) {
                    streamId = id;
                    area = cfg[1] * cfg[2];
                }
            }
        }

        if (streamId >= 0) {
            LOG(INFO) << "Try to open a video with "
                      << "width: " << camInfo->streamConfigurations[streamId][1]
                      << ", height: " << camInfo->streamConfigurations[streamId][2]
                      << ", format: " << camInfo->streamConfigurations[streamId][3];
            success =
                evsCamera->mVideo.open(deviceName,
                                       camInfo->streamConfigurations[streamId][1],
                                       camInfo->streamConfigurations[streamId][2]);
            evsCamera->mFormat = static_cast<uint32_t>(camInfo->streamConfigurations[streamId][3]);
        }
    }

    if (!success) {
        // Create a camera object with the default resolution and format,
        // HAL_PIXEL_FORMAT_RGBA_8888.
        LOG(INFO) << "Open a video with default parameters";
        success =
            evsCamera->mVideo.open(deviceName, kDefaultResolution[0], kDefaultResolution[1]);
        if (!success) {
            LOG(ERROR) << "Failed to open a video stream";
            return nullptr;
        }
    }

    // List available camera parameters
    evsCamera->mCameraControls = evsCamera->mVideo.enumerateCameraControls();

    // Please note that the buffer usage flag does not come from a given stream
    // configuration.
    evsCamera->mUsage = GRALLOC_USAGE_HW_TEXTURE |
                        GRALLOC_USAGE_SW_READ_RARELY |
                        GRALLOC_USAGE_SW_WRITE_OFTEN;

    return evsCamera;
}


using android::base::Result;
using android::base::Error;
Result<void> EvsV4lCamera::startDumpFrames(const std::string& path) {
    struct stat info;
    if (stat(path.c_str(), &info) != 0) {
        return Error(BAD_VALUE) << "Cannot access " << path;
    } else if (!(info.st_mode & S_IFDIR)) {
        return Error(BAD_VALUE) << path << " is not a directory";
    }

    mDumpPath = path;
    mDumpFrame = true;

    return {};
}


Result<void> EvsV4lCamera::stopDumpFrames() {
    if (!mDumpFrame) {
        return Error(INVALID_OPERATION) << "Device is not dumping frames";
    }

    mDumpFrame = false;
    return {};
}

} // namespace implementation
} // namespace V1_1
} // namespace evs
} // namespace automotive
} // namespace hardware
} // namespace android