/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "SurroundView3dSession.h"

#include <android-base/logging.h>
#include <android/hardware_buffer.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>
#include <system/camera_metadata.h>
#include <utils/SystemClock.h>

#include <array>
#include <thread>
#include <set>

#include <android/hardware/camera/device/3.2/ICameraDevice.h>

#include "CameraUtils.h"
#include "sv_3d_params.h"

using ::std::adopt_lock;
using ::std::array;
using ::std::lock;
using ::std::lock_guard;
using ::std::map;
using ::std::mutex;
using ::std::scoped_lock;
using ::std::set;
using ::std::string;
using ::std::thread;
using ::std::unique_lock;
using ::std::unique_ptr;
using ::std::vector;

using ::android::hardware::automotive::evs::V1_0::EvsResult;
using ::android::hardware::camera::device::V3_2::Stream;
using ::android::hardware::hidl_memory;
using ::android::hidl::memory::V1_0::IMemory;

using GraphicsPixelFormat = ::android::hardware::graphics::common::V1_0::PixelFormat;

namespace android {
namespace hardware {
namespace automotive {
namespace sv {
namespace V1_0 {
namespace implementation {

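// Raw layout of one stream configuration entry in the
// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS camera metadata parsed by
// setupEvs() below; kStreamCfgSz is the size of one entry in int32 words.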
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;

static const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
static const uint8_t kGrayColor = 128;
static const int kNumFrames = 4;
static const int kNumChannels = 4;
static const float kUndistortionScales[4] = {1.0f, 1.0f, 1.0f, 1.0f};

SurroundView3dSession::FramesHandler::FramesHandler(
    sp<IEvsCamera> pCamera, sp<SurroundView3dSession> pSession)
    : mCamera(pCamera),
      mSession(pSession) {}

Return<void> SurroundView3dSession::FramesHandler::deliverFrame(
    const BufferDesc_1_0& bufDesc_1_0) {
    LOG(INFO) << "Ignoring a frame delivered from the v1.0 EVS service.";
    mCamera->doneWithFrame(bufDesc_1_0);

    return {};
}

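// Receives one set of frames (one per physical camera) from EVS, re-orders
// them to match the camera order in the IO module config, copies them into
// the session's input buffers, and signals the processing thread.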
Return<void> SurroundView3dSession::FramesHandler::deliverFrame_1_1(
    const hidl_vec<BufferDesc_1_1>& buffers) {
    LOG(INFO) << "Received " << buffers.size() << " frames from the camera";
    mSession->mSequenceId++;

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        if (mSession->mProcessingEvsFrames) {
            LOG(WARNING) << "EVS frames are being processed. Skipping frames: "
                         << mSession->mSequenceId;
            mCamera->doneWithFrame_1_1(buffers);
            return {};
        } else {
            // Set the flag to true immediately so that newly arriving frames
            // will be skipped.
            mSession->mProcessingEvsFrames = true;
        }
    }

    if (buffers.size() != kNumFrames) {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        LOG(ERROR) << "The number of incoming frames is " << buffers.size()
                   << ", which differs from the number " << kNumFrames
                   << " specified in the config file";
        mSession->mProcessingEvsFrames = false;
        mCamera->doneWithFrame_1_1(buffers);
        return {};
    }

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);

        // The incoming frames may not follow the same order as the listed
        // cameras, so re-order them to match the camera ids listed in the
        // camera config.
        vector<int> indices;
        for (const auto& id
                : mSession->mIOModuleConfig->cameraConfig.evsCameraIds) {
            for (int i = 0; i < kNumFrames; i++) {
                if (buffers[i].deviceId == id) {
                    indices.emplace_back(i);
                    break;
                }
            }
        }

        // If the number of indices is smaller than kNumFrames, one or more
        // frames came from cameras other than the ones we expected.
        if (indices.size() != kNumFrames) {
            LOG(ERROR) << "The frames are not from the cameras we expected!";
            mSession->mProcessingEvsFrames = false;
            mCamera->doneWithFrame_1_1(buffers);
            return {};
        }

        for (int i = 0; i < kNumFrames; i++) {
            LOG(DEBUG) << "Copying buffer from camera ["
                       << buffers[indices[i]].deviceId
                       << "] to Surround View Service";
            mSession->copyFromBufferToPointers(buffers[indices[i]],
                                               mSession->mInputPointers[i]);
        }
    }

    mCamera->doneWithFrame_1_1(buffers);

    // Notify the session that a new set of frames is ready
    mSession->mFramesSignal.notify_all();

    return {};
}

Return<void> SurroundView3dSession::FramesHandler::notify(const EvsEventDesc& event) {
    switch (event.aType) {
        case EvsEventType::STREAM_STOPPED:
            // The Surround View STREAM_STOPPED event is generated when the
            // service finishes processing the queued frames, so it does not
            // rely on the EVS STREAM_STOPPED event.
            LOG(INFO) << "Received a STREAM_STOPPED event from EVS.";
            break;

        case EvsEventType::PARAMETER_CHANGED:
            LOG(INFO) << "Camera parameter " << std::hex << event.payload[0]
                      << " is set to " << event.payload[1];
            break;

        // The events below are ignored in the reference implementation.
        case EvsEventType::STREAM_STARTED:
            [[fallthrough]];
        case EvsEventType::FRAME_DROPPED:
            [[fallthrough]];
        case EvsEventType::TIMEOUT:
            LOG(INFO) << "Event " << std::hex << static_cast<unsigned>(event.aType)
                      << " is received but ignored.";
            break;
        default:
            LOG(ERROR) << "Unknown event id: " << static_cast<unsigned>(event.aType);
            break;
    }

    return {};
}

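// Wraps the EVS buffer in a GraphicBuffer, maps it for CPU access, and copies
// the image from the 4-channel (RGBA) EVS buffer into the 3-channel (RGB)
// buffer expected by the Surround View core library.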
bool SurroundView3dSession::copyFromBufferToPointers(
    BufferDesc_1_1 buffer, SurroundViewInputBufferPointers pointers) {

    AHardwareBuffer_Desc* pDesc =
        reinterpret_cast<AHardwareBuffer_Desc *>(&buffer.buffer.description);

    // Create a GraphicBuffer from the existing handle.
    sp<GraphicBuffer> inputBuffer = new GraphicBuffer(
        buffer.buffer.nativeHandle, GraphicBuffer::CLONE_HANDLE, pDesc->width,
        pDesc->height, pDesc->format, pDesc->layers,
        GRALLOC_USAGE_HW_TEXTURE, pDesc->stride);

    if (inputBuffer == nullptr) {
        LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap image handle";
        return false;
    } else {
        LOG(INFO) << "Managed to allocate GraphicBuffer with "
                  << " width: " << pDesc->width
                  << " height: " << pDesc->height
                  << " format: " << pDesc->format
                  << " stride: " << pDesc->stride;
    }

    // Lock the input GraphicBuffer and map it to a pointer. If the lock
    // fails, return false.
    void* inputDataPtr;
    inputBuffer->lock(
        GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
        &inputDataPtr);
    if (!inputDataPtr) {
        LOG(ERROR) << "Failed to gain read access to GraphicBuffer";
        inputBuffer->unlock();
        return false;
    } else {
        LOG(INFO) << "Managed to get read access to GraphicBuffer";
    }

    int stride = pDesc->stride;

    // readPtr comes from EVS and carries 4 channels (RGBA).
    uint8_t* readPtr = static_cast<uint8_t*>(inputDataPtr);

    // writePtr carries 3 channels (RGB), since that is what the SV core lib
    // expects.
    uint8_t* writePtr = static_cast<uint8_t*>(pointers.cpu_data_pointer);

    for (int i = 0; i < pDesc->width; i++)
        for (int j = 0; j < pDesc->height; j++) {
            writePtr[(i + j * stride) * 3 + 0] =
                readPtr[(i + j * stride) * 4 + 0];
            writePtr[(i + j * stride) * 3 + 1] =
                readPtr[(i + j * stride) * 4 + 1];
            writePtr[(i + j * stride) * 3 + 2] =
                readPtr[(i + j * stride) * 4 + 2];
        }
    LOG(INFO) << "Brute force copying finished";

    return true;
}

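// Main loop of the processing thread: waits until a new set of EVS frames has
// been copied in, runs the 3d pipeline on it, and clears the flag so the next
// set of frames can be accepted. Exits when the stream leaves the RUNNING
// state.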
void SurroundView3dSession::processFrames() {
    if (mSurroundView->Start3dPipeline()) {
        LOG(INFO) << "Start3dPipeline succeeded";
    } else {
        LOG(ERROR) << "Start3dPipeline failed";
        return;
    }

    while (true) {
        {
            unique_lock<mutex> lock(mAccessLock);

            if (mStreamState != RUNNING) {
                break;
            }

            mFramesSignal.wait(lock, [this]() { return mProcessingEvsFrames; });
        }

        handleFrames(mSequenceId);

        {
            // Set the boolean to false to receive the next set of frames.
            scoped_lock<mutex> lock(mAccessLock);
            mProcessingEvsFrames = false;
        }
    }

    // Notify the SV client that no new results will be delivered.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STOPPED";
    mStream->notify(SvEvent::STREAM_STOPPED);

    {
        scoped_lock<mutex> lock(mAccessLock);
        mStreamState = STOPPED;
        mStream = nullptr;
        LOG(DEBUG) << "Stream marked STOPPED.";
    }
}

SurroundView3dSession::SurroundView3dSession(sp<IEvsEnumerator> pEvs,
                                             VhalHandler* vhalHandler,
                                             AnimationModule* animationModule,
                                             IOModuleConfig* pConfig) :
      mEvs(pEvs),
      mStreamState(STOPPED),
      mVhalHandler(vhalHandler),
      mAnimationModule(animationModule),
      mIOModuleConfig(pConfig) {
    mEvsCameraIds = {"0", "1", "2", "3"};
}

SurroundView3dSession::~SurroundView3dSession() {
    // In case the client did not call stopStream properly, we should stop the
    // stream explicitly. Otherwise the process thread will take forever to
    // join.
    stopStream();

    // Wait for the process thread to finish the buffered frames.
    if (mProcessThread.joinable()) {
        mProcessThread.join();
    }

    mEvs->closeCamera(mCamera);
}

// Methods from ::android::hardware::automotive::sv::V1_0::ISurroundViewSession.
Return<SvResult> SurroundView3dSession::startStream(
    const sp<ISurroundViewStream>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (!mIsInitialized && !initialize()) {
        LOG(ERROR) << "There is an error while initializing the use case. "
                   << "Exiting";
        return SvResult::INTERNAL_ERROR;
    }

    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is "
                   << "already running.";
        return SvResult::INTERNAL_ERROR;
    }

    if (mViews.empty()) {
        LOG(ERROR) << "No views have been set for the current Surround View "
                   << "3d Session. Please call setViews before starting "
                   << "the stream.";
        return SvResult::VIEW_NOT_SET;
    }

    if (stream == nullptr) {
        LOG(ERROR) << "The input stream is invalid";
        return SvResult::INTERNAL_ERROR;
    }
    mStream = stream;

    mSequenceId = 0;
    startEvs();

    if (mVhalHandler != nullptr) {
        if (!mVhalHandler->startPropertiesUpdate()) {
            LOG(WARNING) << "VhalHandler cannot be started properly";
        }
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    // TODO(b/158131080): the STREAM_STARTED event is not implemented in the
    // EVS reference implementation yet. Once implemented, this logic should
    // be moved to the EVS notify callback.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STARTED";
    mStream->notify(SvEvent::STREAM_STARTED);
    mProcessingEvsFrames = false;

    // Start the frame generation thread
    mStreamState = RUNNING;

    mProcessThread = thread([this]() {
        processFrames();
    });

    return SvResult::OK;
}

Return<void> SurroundView3dSession::stopStream() {
    LOG(DEBUG) << __FUNCTION__;
    unique_lock<mutex> lock(mAccessLock);

    if (mVhalHandler != nullptr) {
        mVhalHandler->stopPropertiesUpdate();
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    if (mStreamState == RUNNING) {
        // Tell the processFrames loop to stop processing frames
        mStreamState = STOPPING;

        // Stop the EVS stream asynchronously
        mCamera->stopVideoStream();
    }

    return {};
}

Return<void> SurroundView3dSession::doneWithFrames(
    const SvFramesDesc& svFramesDesc) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mFramesRecord.inUse = false;

    (void)svFramesDesc;
    return {};
}

// Methods from ISurroundView3dSession follow.
Return<SvResult> SurroundView3dSession::setViews(
    const hidl_vec<View3d>& views) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mViews.resize(views.size());
    for (int i = 0; i < views.size(); i++) {
        mViews[i] = views[i];
    }

    return SvResult::OK;
}

Return<SvResult> SurroundView3dSession::set3dConfig(const Sv3dConfig& sv3dConfig) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (sv3dConfig.width <= 0 || sv3dConfig.width > 4096) {
        LOG(WARNING) << "The width of the 3d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    if (sv3dConfig.height <= 0 || sv3dConfig.height > 4096) {
        LOG(WARNING) << "The height of the 3d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    mConfig.width = sv3dConfig.width;
    mConfig.height = sv3dConfig.height;
    mConfig.carDetails = sv3dConfig.carDetails;

    if (mStream != nullptr) {
        LOG(DEBUG) << "Notify SvEvent::CONFIG_UPDATED";
        mStream->notify(SvEvent::CONFIG_UPDATED);
    }

    return SvResult::OK;
}

Return<void> SurroundView3dSession::get3dConfig(get3dConfig_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mConfig);
    return {};
}

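// Validates the overlay shared memory against its descriptors (total size,
// unique ids, vertex counts) and, on success, copies each overlay's vertices
// into the sv core Overlay structures. Returns false and clears
// svCoreOverlays on any validation failure.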
bool VerifyAndGetOverlays(const OverlaysData& overlaysData, std::vector<Overlay>* svCoreOverlays) {
    // Clear the overlays.
    svCoreOverlays->clear();

    // Check that the size of the shared memory matches overlaysMemoryDesc.
    const int kVertexSize = 16;
    const int kIdSize = 2;
    int memDescSize = 0;
    for (auto& overlayMemDesc : overlaysData.overlaysMemoryDesc) {
        memDescSize += kIdSize + kVertexSize * overlayMemDesc.verticesCount;
    }
    if (overlaysData.overlaysMemory.size() < memDescSize) {
        LOG(ERROR) << "Allocated shared memory size is less than overlaysMemoryDesc size.";
        return false;
    }

    // Map memory.
    sp<IMemory> pSharedMemory = mapMemory(overlaysData.overlaysMemory);
    if (pSharedMemory == nullptr) {
        LOG(ERROR) << "mapMemory failed.";
        return false;
    }

    // Get the data pointer.
    uint8_t* pData = static_cast<uint8_t*>(
        static_cast<void*>(pSharedMemory->getPointer()));
    if (pData == nullptr) {
        LOG(ERROR) << "Shared memory getPointer() failed.";
        return false;
    }

    int idOffset = 0;
    set<uint16_t> overlayIdSet;
    for (auto& overlayMemDesc : overlaysData.overlaysMemoryDesc) {

        if (overlayIdSet.find(overlayMemDesc.id) != overlayIdSet.end()) {
            LOG(ERROR) << "Duplicate id within memory descriptor.";
            svCoreOverlays->clear();
            return false;
        }
        overlayIdSet.insert(overlayMemDesc.id);

        if (overlayMemDesc.verticesCount < 3) {
            LOG(ERROR) << "Less than 3 vertices.";
            svCoreOverlays->clear();
            return false;
        }

        if (overlayMemDesc.overlayPrimitive == OverlayPrimitive::TRIANGLES &&
            overlayMemDesc.verticesCount % 3 != 0) {
            LOG(ERROR) << "Triangles primitive does not have vertices "
                       << "multiple of 3.";
            svCoreOverlays->clear();
            return false;
        }

        const uint16_t overlayId = *((uint16_t*)(pData + idOffset));

        if (overlayId != overlayMemDesc.id) {
            LOG(ERROR) << "Overlay id mismatch " << overlayId << ", " << overlayMemDesc.id;
            svCoreOverlays->clear();
            return false;
        }

        // Copy the shared memory data over to the sv core overlays.
        Overlay svCoreOverlay;
        svCoreOverlay.id = overlayMemDesc.id;
        svCoreOverlay.vertices.resize(overlayMemDesc.verticesCount);
        uint8_t* verticesDataPtr = pData + idOffset + kIdSize;
        memcpy(svCoreOverlay.vertices.data(), verticesDataPtr,
               kVertexSize * overlayMemDesc.verticesCount);
        svCoreOverlays->push_back(svCoreOverlay);

        idOffset += kIdSize + (kVertexSize * overlayMemDesc.verticesCount);
    }

    return true;
}

Return<SvResult> SurroundView3dSession::updateOverlays(const OverlaysData& overlaysData) {
    LOG(DEBUG) << __FUNCTION__;

    scoped_lock<mutex> lock(mAccessLock);
    if (!VerifyAndGetOverlays(overlaysData, &mOverlays)) {
        LOG(ERROR) << "VerifyAndGetOverlays failed.";
        return SvResult::INVALID_ARG;
    }

    mOverlayIsUpdated = true;
    return SvResult::OK;
}

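// Projects 2d points in a physical camera's image space onto the 3d bowl
// surface. Points outside the camera's resolution are marked invalid; valid
// projections are converted from meters to millimeters before being returned.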
Return<void> SurroundView3dSession::projectCameraPointsTo3dSurface(
    const hidl_vec<Point2dInt>& cameraPoints, const hidl_string& cameraId,
    projectCameraPointsTo3dSurface_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;
    bool cameraIdFound = false;
    int cameraIndex = 0;
    std::vector<Point3dFloat> points3d;

    // Note: mEvsCameraIds must be in the order front, right, rear, left.
    for (auto& evsCameraId : mEvsCameraIds) {
        if (cameraId == evsCameraId) {
            cameraIdFound = true;
            LOG(DEBUG) << "Camera id found for projection: " << cameraId;
            break;
        }
        cameraIndex++;
    }

    if (!cameraIdFound) {
        LOG(ERROR) << "Camera id not found for projection: " << cameraId;
        _hidl_cb(points3d);
        return {};
    }

    for (const auto& cameraPoint : cameraPoints) {
        Point3dFloat point3d = {false, 0.0, 0.0, 0.0};

        // Verify that the camera point is within the camera resolution bounds.
        const Size2dInteger cameraSize = mCameraParams[cameraIndex].size;
        point3d.isValid = (cameraPoint.x >= 0 && cameraPoint.x < cameraSize.width &&
                           cameraPoint.y >= 0 && cameraPoint.y < cameraSize.height);
        if (!point3d.isValid) {
            LOG(WARNING) << "Camera point (" << cameraPoint.x << ", " << cameraPoint.y
                         << ") is out of camera resolution bounds.";
            points3d.push_back(point3d);
            continue;
        }

        // Project the point using the mSurroundView function.
        const Coordinate2dInteger camCoord(cameraPoint.x, cameraPoint.y);
        Coordinate3dFloat projPoint3d(0.0, 0.0, 0.0);
        point3d.isValid =
            mSurroundView->GetProjectionPointFromRawCameraToSurroundView3d(camCoord,
                                                                           cameraIndex,
                                                                           &projPoint3d);
        // Convert projPoint3d in meters to point3d, which is in millimeters.
        point3d.x = projPoint3d.x * 1000.0;
        point3d.y = projPoint3d.y * 1000.0;
        point3d.z = projPoint3d.z * 1000.0;
        points3d.push_back(point3d);
    }
    _hidl_cb(points3d);
    return {};
}

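// Generates one 3d surround view frame from the most recently copied set of
// EVS frames: re-allocates the output buffer and texture if the configured
// resolution changed, pushes pending overlay and animation updates to the
// core library, renders the view, copies the result into the GraphicBuffer
// texture, and delivers it to the client stream.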
bool SurroundView3dSession::handleFrames(int sequenceId) {
    LOG(INFO) << __FUNCTION__ << ": Handling sequenceId " << sequenceId << ".";

    // TODO(b/157498592): Now only one set of EVS input frames and one SV
    // output frame is supported. Implement a buffer queue for both of them.
    {
        scoped_lock<mutex> lock(mAccessLock);

        if (mFramesRecord.inUse) {
            LOG(DEBUG) << "Notify SvEvent::FRAME_DROPPED";
            mStream->notify(SvEvent::FRAME_DROPPED);
            return true;
        }
    }

    // If the width/height was changed, re-allocate the data pointer.
    if (mOutputWidth != mConfig.width
        || mOutputHeight != mConfig.height) {
        LOG(DEBUG) << "Config changed. Re-allocate memory. "
                   << "Old width: "
                   << mOutputWidth
                   << ", old height: "
                   << mOutputHeight
                   << "; New width: "
                   << mConfig.width
                   << ", new height: "
                   << mConfig.height;
        delete[] static_cast<char*>(mOutputPointer.data_pointer);
        mOutputWidth = mConfig.width;
        mOutputHeight = mConfig.height;
        mOutputPointer.height = mOutputHeight;
        mOutputPointer.width = mOutputWidth;
        mOutputPointer.format = Format::RGBA;
        mOutputPointer.data_pointer =
            new char[mOutputHeight * mOutputWidth * kNumChannels];

        if (!mOutputPointer.data_pointer) {
            LOG(ERROR) << "Memory allocation failed. Exiting.";
            return false;
        }

        Size2dInteger size = Size2dInteger(mOutputWidth, mOutputHeight);
        mSurroundView->Update3dOutputResolution(size);

        mSvTexture = new GraphicBuffer(mOutputWidth,
                                       mOutputHeight,
                                       HAL_PIXEL_FORMAT_RGBA_8888,
                                       1,
                                       GRALLOC_USAGE_HW_TEXTURE,
                                       "SvTexture");
        if (mSvTexture->initCheck() == OK) {
            LOG(INFO) << "Successfully allocated Graphic Buffer";
        } else {
            LOG(ERROR) << "Failed to allocate Graphic Buffer";
            return false;
        }
    }

    // Set 3d overlays.
    {
        scoped_lock<mutex> lock(mAccessLock);
        if (mOverlayIsUpdated) {
            if (!mSurroundView->Set3dOverlay(mOverlays)) {
                LOG(ERROR) << "Set 3d overlays failed.";
            }
            mOverlayIsUpdated = false;
        }
    }

    // TODO(b/150412555): do not use setViews for frame generation since there
    // is a discrepancy between the HIDL APIs and core lib APIs.
    array<array<float, 4>, 4> matrix;

    // TODO(b/150412555): use hard-coded views for now. Change the view every
    // frame.
    int recViewId = sequenceId % 16;
    for (int i = 0; i < 4; i++)
        for (int j = 0; j < 4; j++) {
            matrix[i][j] = kRecViews[recViewId][i * 4 + j];
        }

    // Get the latest VHAL property values.
    if (mVhalHandler != nullptr) {
        if (!mVhalHandler->getPropertyValues(&mPropertyValues)) {
            LOG(ERROR) << "Failed to get property values";
        }
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    vector<AnimationParam> params;
    if (mAnimationModule != nullptr) {
        params = mAnimationModule->getUpdatedAnimationParams(mPropertyValues);
    } else {
        LOG(WARNING) << "AnimationModule is null. Ignored";
    }

    if (!params.empty()) {
        mSurroundView->SetAnimations(params);
    } else {
        LOG(INFO) << "AnimationParams is empty. Ignored";
    }

    if (mSurroundView->Get3dSurroundView(
            mInputPointers, matrix, &mOutputPointer)) {
        LOG(INFO) << "Get3dSurroundView succeeded";
    } else {
        LOG(ERROR) << "Get3dSurroundView failed. "
                   << "Using memset to initialize to gray.";
        memset(mOutputPointer.data_pointer, kGrayColor,
               mOutputHeight * mOutputWidth * kNumChannels);
    }

    void* textureDataPtr = nullptr;
    mSvTexture->lock(GRALLOC_USAGE_SW_WRITE_OFTEN
                     | GRALLOC_USAGE_SW_READ_NEVER,
                     &textureDataPtr);
    if (!textureDataPtr) {
        LOG(ERROR) << "Failed to gain write access to GraphicBuffer!";
        return false;
    }

    // Note: there is a chance that the stride of the texture is not the same
    // as the width. For example, for a 1920 x 1080 frame the width is 1920,
    // but the stride could be 2048. So copy the data line by line instead of
    // using a single memcpy.
    uint8_t* writePtr = static_cast<uint8_t*>(textureDataPtr);
    uint8_t* readPtr = static_cast<uint8_t*>(mOutputPointer.data_pointer);
    const int readStride = mOutputWidth * kNumChannels;
    const int writeStride = mSvTexture->getStride() * kNumChannels;
    if (readStride == writeStride) {
        memcpy(writePtr, readPtr, readStride * mSvTexture->getHeight());
    } else {
        for (int i = 0; i < mSvTexture->getHeight(); i++) {
            memcpy(writePtr, readPtr, readStride);
            writePtr = writePtr + writeStride;
            readPtr = readPtr + readStride;
        }
    }
    LOG(INFO) << "memcpy finished!";
    mSvTexture->unlock();

    ANativeWindowBuffer* buffer = mSvTexture->getNativeBuffer();
    LOG(DEBUG) << "ANativeWindowBuffer->handle: " << buffer->handle;

    {
        scoped_lock<mutex> lock(mAccessLock);

        mFramesRecord.frames.svBuffers.resize(1);
        SvBuffer& svBuffer = mFramesRecord.frames.svBuffers[0];
        svBuffer.viewId = 0;
        svBuffer.hardwareBuffer.nativeHandle = buffer->handle;
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc *>(
                &svBuffer.hardwareBuffer.description);
        pDesc->width = mOutputWidth;
        pDesc->height = mOutputHeight;
        pDesc->layers = 1;
        pDesc->usage = GRALLOC_USAGE_HW_TEXTURE;
        pDesc->stride = mSvTexture->getStride();
        pDesc->format = HAL_PIXEL_FORMAT_RGBA_8888;
        mFramesRecord.frames.timestampNs = elapsedRealtimeNano();
        mFramesRecord.frames.sequenceId = sequenceId;

        mFramesRecord.inUse = true;
        mStream->receiveFrames(mFramesRecord.frames);
    }

    return true;
}

bool SurroundView3dSession::initialize() {
    lock_guard<mutex> lock(mAccessLock, adopt_lock);

    if (!setupEvs()) {
        LOG(ERROR) << "Failed to setup EVS components for 3d session";
        return false;
    }

    // TODO(b/150412555): ask core-lib team to add API description for "create"
    // method in the .h file.
    // The create method will never return a null pointer based on the API
    // description.
    mSurroundView = unique_ptr<SurroundView>(Create());

    SurroundViewStaticDataParams params =
        SurroundViewStaticDataParams(
            mCameraParams,
            mIOModuleConfig->sv2dConfig.sv2dParams,
            mIOModuleConfig->sv3dConfig.sv3dParams,
            vector<float>(std::begin(kUndistortionScales),
                          std::end(kUndistortionScales)),
            mIOModuleConfig->sv2dConfig.carBoundingBox,
            mIOModuleConfig->carModelConfig.carModel.texturesMap,
            mIOModuleConfig->carModelConfig.carModel.partsMap);
    mSurroundView->SetStaticData(params);

    mInputPointers.resize(kNumFrames);
    for (int i = 0; i < kNumFrames; i++) {
        mInputPointers[i].width = mCameraParams[i].size.width;
        mInputPointers[i].height = mCameraParams[i].size.height;
        mInputPointers[i].format = Format::RGB;
        mInputPointers[i].cpu_data_pointer =
            (void*)new uint8_t[mInputPointers[i].width *
                               mInputPointers[i].height *
                               kNumChannels];
    }
    LOG(INFO) << "Allocated " << kNumFrames << " input pointers";

    mOutputWidth = mIOModuleConfig->sv3dConfig.sv3dParams.resolution.width;
    mOutputHeight = mIOModuleConfig->sv3dConfig.sv3dParams.resolution.height;

    mConfig.width = mOutputWidth;
    mConfig.height = mOutputHeight;
    mConfig.carDetails = SvQuality::HIGH;

    mOutputPointer.height = mOutputHeight;
    mOutputPointer.width = mOutputWidth;
    mOutputPointer.format = Format::RGBA;
    mOutputPointer.data_pointer = new char[
        mOutputHeight * mOutputWidth * kNumChannels];

    if (!mOutputPointer.data_pointer) {
        LOG(ERROR) << "Memory allocation failed. Exiting.";
        return false;
    }

    mSvTexture = new GraphicBuffer(mOutputWidth,
                                   mOutputHeight,
                                   HAL_PIXEL_FORMAT_RGBA_8888,
                                   1,
                                   GRALLOC_USAGE_HW_TEXTURE,
                                   "SvTexture");

    if (mSvTexture->initCheck() == OK) {
        LOG(INFO) << "Successfully allocated Graphic Buffer";
    } else {
        LOG(ERROR) << "Failed to allocate Graphic Buffer";
        return false;
    }

    mIsInitialized = true;
    return true;
}

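// Opens the EVS camera group specified in the IO module config, picks the
// largest RGBA8888 stream configuration advertised in the camera metadata,
// and fetches the per-physical-camera parameters needed by the core library.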
bool SurroundView3dSession::setupEvs() {
    // Read the camera related information from the config object.
    const string evsGroupId = mIOModuleConfig->cameraConfig.evsGroupId;

    // Setup for EVS
    LOG(INFO) << "Requesting camera list";
    mEvs->getCameraList_1_1(
        [this, evsGroupId] (hidl_vec<CameraDesc> cameraList) {
            LOG(INFO) << "Camera list callback received " << cameraList.size();
            for (auto&& cam : cameraList) {
                LOG(INFO) << "Found camera " << cam.v1.cameraId;
                if (cam.v1.cameraId == evsGroupId) {
                    mCameraDesc = cam;
                }
            }
        });

    bool foundCfg = false;
    std::unique_ptr<Stream> targetCfg(new Stream());

    // This logic picks the configuration with the largest area that supports
    // the RGBA8888 format.
    int32_t maxArea = 0;
    camera_metadata_entry_t streamCfgs;
    if (!find_camera_metadata_entry(
            reinterpret_cast<camera_metadata_t *>(mCameraDesc.metadata.data()),
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            &streamCfgs)) {
        // Stream configurations are found in metadata
        RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(
            streamCfgs.data.i32);
        for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
            if (ptr->direction ==
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {

                if (ptr->width * ptr->height > maxArea) {
                    targetCfg->id = ptr->id;
                    targetCfg->width = ptr->width;
                    targetCfg->height = ptr->height;

                    // This client always expects its input data in the format
                    // below.
                    targetCfg->format =
                        static_cast<GraphicsPixelFormat>(
                            HAL_PIXEL_FORMAT_RGBA_8888);

                    maxArea = ptr->width * ptr->height;

                    foundCfg = true;
                }
            }
            ++ptr;
        }
    } else {
        LOG(WARNING) << "No stream configuration data is found; "
                     << "default parameters will be used.";
    }

    if (!foundCfg) {
        LOG(INFO) << "No config was found";
        targetCfg = nullptr;
        return false;
    }

    string camId = mCameraDesc.v1.cameraId.c_str();
    mCamera = mEvs->openCamera_1_1(camId.c_str(), *targetCfg);
    if (mCamera == nullptr) {
        LOG(ERROR) << "Failed to allocate EVS Camera interface for " << camId;
        return false;
    } else {
        LOG(INFO) << "Camera " << camId << " is opened successfully";
    }

    map<string, AndroidCameraParams> cameraIdToAndroidParameters;
    for (const auto& id : mIOModuleConfig->cameraConfig.evsCameraIds) {
        AndroidCameraParams params;
        if (getAndroidCameraParams(mCamera, id, params)) {
            cameraIdToAndroidParameters.emplace(id, params);
            LOG(INFO) << "Camera parameters are fetched successfully for "
                      << "physical camera: " << id;
        } else {
            LOG(ERROR) << "Failed to get camera parameters for "
                       << "physical camera: " << id;
            return false;
        }
    }

    mCameraParams =
        convertToSurroundViewCameraParams(cameraIdToAndroidParameters);

    for (auto& camera : mCameraParams) {
        camera.size.width = targetCfg->width;
        camera.size.height = targetCfg->height;
        camera.circular_fov = 179;
    }

    return true;
}

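// Registers the frame handler and starts the EVS video stream; returns false
// if the stream could not be started.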
bool SurroundView3dSession::startEvs() {
    mFramesHandler = new FramesHandler(mCamera, this);
    Return<EvsResult> result = mCamera->startVideoStream(mFramesHandler);
    if (result != EvsResult::OK) {
        LOG(ERROR) << "Failed to start video stream";
        return false;
    } else {
        LOG(INFO) << "Video stream was started successfully";
    }

    return true;
}

}  // namespace implementation
}  // namespace V1_0
}  // namespace sv
}  // namespace automotive
}  // namespace hardware
}  // namespace android