/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#define LOG_TAG "SurroundViewService"

#include "SurroundView2dSession.h"

#include <android-base/logging.h>
#include <android/hardware_buffer.h>
#include <system/camera_metadata.h>
#include <utils/SystemClock.h>

#include <thread>

#include <android/hardware/camera/device/3.2/ICameraDevice.h>

#include "CameraUtils.h"

using ::android::hardware::automotive::evs::V1_0::EvsResult;
using ::android::hardware::camera::device::V3_2::Stream;

using GraphicsPixelFormat = ::android::hardware::graphics::common::V1_0::PixelFormat;

namespace android {
namespace hardware {
namespace automotive {
namespace sv {
namespace V1_0 {
namespace implementation {

// TODO(b/158479099): There is a lot of redundant code shared between the 2d
// and 3d sessions. Reduce the duplication.
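
// Layout of one stream configuration entry in the
// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS camera metadata, as parsed
// in setupEvs(): each configuration is a group of six consecutive int32
// values.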
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;

// Number of int32 elements per stream configuration entry in the metadata.
static const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
static const uint8_t kGrayColor = 128;
static const int kNumChannels = 3;
static const int kNumFrames = 4;
static const int kSv2dViewId = 0;

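// FramesHandler is the EVS camera stream callback registered with the EVS
// camera in startEvs(). It receives frames and events from EVS and forwards
// the frame data to the owning SurroundView2dSession.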
SurroundView2dSession::FramesHandler::FramesHandler(
    sp<IEvsCamera> pCamera, sp<SurroundView2dSession> pSession)
    : mCamera(pCamera),
      mSession(pSession) {}

Return<void> SurroundView2dSession::FramesHandler::deliverFrame(
    const BufferDesc_1_0& bufDesc_1_0) {
    LOG(INFO) << "Ignoring a frame delivered from the v1.0 EVS service.";
    mCamera->doneWithFrame(bufDesc_1_0);

    return {};
}

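// Receives a synchronized set of frames from EVS (expected to be kNumFrames
// of them, one from each physical camera in the group). If the previous set
// is still being processed, the new frames are returned to EVS right away;
// otherwise they are copied into the session's input buffers, released back
// to EVS, and the processing thread is signaled.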
Return<void> SurroundView2dSession::FramesHandler::deliverFrame_1_1(
    const hidl_vec<BufferDesc_1_1>& buffers) {
    LOG(INFO) << "Received " << buffers.size() << " frames from the camera";
    mSession->mSequenceId++;

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        if (mSession->mProcessingEvsFrames) {
            LOG(WARNING) << "EVS frames are still being processed. Skipping frames: "
                         << mSession->mSequenceId;
            mCamera->doneWithFrame_1_1(buffers);
            return {};
        }
    }

    if (buffers.size() != kNumFrames) {
        LOG(ERROR) << "The number of incoming frames is " << buffers.size()
                   << ", which is different from the number " << kNumFrames
                   << " specified in the config file";
        return {};
    }

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        for (int i = 0; i < kNumFrames; i++) {
            LOG(DEBUG) << "Copying buffer No." << i
                       << " to Surround View Service";
            mSession->copyFromBufferToPointers(buffers[i],
                                               mSession->mInputPointers[i]);
        }
    }

    mCamera->doneWithFrame_1_1(buffers);

    // Notify the session that a new set of frames is ready
    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        mSession->mProcessingEvsFrames = true;
    }
    mSession->mFramesSignal.notify_all();

    return {};
}

Return<void> SurroundView2dSession::FramesHandler::notify(const EvsEventDesc& event) {
    switch(event.aType) {
        case EvsEventType::STREAM_STOPPED:
        {
            LOG(INFO) << "Received a STREAM_STOPPED event from Evs.";

            // TODO(b/158339680): There is currently an issue in the EVS
            // reference implementation that prevents the STREAM_STOPPED event
            // from being delivered properly. When the bug is fixed, we should
            // handle this event properly in case the EVS stream is stopped
            // unexpectedly.
            break;
        }

        case EvsEventType::PARAMETER_CHANGED:
            LOG(INFO) << "Camera parameter " << std::hex << event.payload[0]
                      << " is set to " << event.payload[1];
            break;

        // The events below are ignored in the reference implementation.
        case EvsEventType::STREAM_STARTED:
        [[fallthrough]];
        case EvsEventType::FRAME_DROPPED:
        [[fallthrough]];
        case EvsEventType::TIMEOUT:
            LOG(INFO) << "Event " << std::hex << static_cast<unsigned>(event.aType)
                      << " is received but ignored.";
            break;
        default:
            LOG(ERROR) << "Unknown event id: " << static_cast<unsigned>(event.aType);
            break;
    }

    return {};
}

bool SurroundView2dSession::copyFromBufferToPointers(
    BufferDesc_1_1 buffer, SurroundViewInputBufferPointers pointers) {

    AHardwareBuffer_Desc* pDesc =
        reinterpret_cast<AHardwareBuffer_Desc *>(&buffer.buffer.description);

    // Create a GraphicBuffer from the existing handle.
    sp<GraphicBuffer> inputBuffer = new GraphicBuffer(
        buffer.buffer.nativeHandle, GraphicBuffer::CLONE_HANDLE, pDesc->width,
        pDesc->height, pDesc->format, pDesc->layers,
        GRALLOC_USAGE_HW_TEXTURE, pDesc->stride);

    if (inputBuffer == nullptr) {
        // Returning false here means the current set of input frames cannot
        // be processed; the caller simply moves on to the next delivered set.
        LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap image handle";
        return false;
    } else {
        LOG(INFO) << "Managed to allocate GraphicBuffer with"
                  << " width: " << pDesc->width
                  << " height: " << pDesc->height
                  << " format: " << pDesc->format
                  << " stride: " << pDesc->stride;
    }

    // Lock the input GraphicBuffer and map it to a pointer. If the lock
    // fails, return false.
    void* inputDataPtr;
    inputBuffer->lock(
        GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
        &inputDataPtr);
    if (!inputDataPtr) {
        LOG(ERROR) << "Failed to gain read access to GraphicBuffer";
        inputBuffer->unlock();
        return false;
    } else {
        LOG(INFO) << "Managed to get read access to GraphicBuffer";
    }

    int stride = pDesc->stride;

    // readPtr points to the EVS frame, which has 4 channels (RGBA).
    uint8_t* readPtr = static_cast<uint8_t*>(inputDataPtr);

    // writePtr points to the session's input buffer, which has 3 channels (RGB).
    uint8_t* writePtr = static_cast<uint8_t*>(pointers.cpu_data_pointer);

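    // Convert RGBA to RGB pixel by pixel. Both index expressions use the
    // source stride, so this copy assumes the stride equals the width; the
    // destination buffer was allocated as width * height * kNumChannels in
    // initialize().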
    for (int i=0; i<pDesc->width; i++)
        for (int j=0; j<pDesc->height; j++) {
            writePtr[(i + j * stride) * 3 + 0] =
                readPtr[(i + j * stride) * 4 + 0];
            writePtr[(i + j * stride) * 3 + 1] =
                readPtr[(i + j * stride) * 4 + 1];
            writePtr[(i + j * stride) * 3 + 2] =
                readPtr[(i + j * stride) * 4 + 2];
        }
    LOG(INFO) << "Brute force copying finished";

    return true;
}

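// Worker loop run on mProcessThread. It waits until deliverFrame_1_1() marks
// a new set of EVS frames as ready, stitches them via handleFrames(), and
// then clears the flag so the next set can be accepted. When the stream
// leaves the RUNNING state, the loop exits and the client is notified that
// the stream has stopped.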
void SurroundView2dSession::processFrames() {
    while (true) {
        {
            unique_lock<mutex> lock(mAccessLock);

            if (mStreamState != RUNNING) {
                break;
            }

            mFramesSignal.wait(lock, [this]() { return mProcessingEvsFrames; });
        }

        handleFrames(mSequenceId);

        {
            // Set the boolean to false to receive the next set of frames.
            scoped_lock<mutex> lock(mAccessLock);
            mProcessingEvsFrames = false;
        }
    }

    // Notify the SV client that no new results will be delivered.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STOPPED";
    mStream->notify(SvEvent::STREAM_STOPPED);

    {
        scoped_lock<mutex> lock(mAccessLock);
        mStreamState = STOPPED;
        mStream = nullptr;
        LOG(DEBUG) << "Stream marked STOPPED.";
    }
}

SurroundView2dSession::SurroundView2dSession(sp<IEvsEnumerator> pEvs)
    : mEvs(pEvs),
      mStreamState(STOPPED) {
    mEvsCameraIds = {"0", "1", "2", "3"};
}

SurroundView2dSession::~SurroundView2dSession() {
    // In case the client did not call stopStream properly, we should stop the
    // stream explicitly. Otherwise the process thread will take forever to
    // join.
    stopStream();

    // Waiting for the process thread to finish the buffered frames.
    if (mProcessThread.joinable()) {
        mProcessThread.join();
    }

    mEvs->closeCamera(mCamera);
}

// Methods from ::android::hardware::automotive::sv::V1_0::ISurroundViewSession
Return<SvResult> SurroundView2dSession::startStream(
    const sp<ISurroundViewStream>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (!mIsInitialized && !initialize()) {
        LOG(ERROR) << "There is an error while initializing the use case. "
                   << "Exiting";
        return SvResult::INTERNAL_ERROR;
    }

    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startStream call "
                   << "when a stream is already running.";
        return SvResult::INTERNAL_ERROR;
    }

    if (stream == nullptr) {
        LOG(ERROR) << "The input stream is invalid";
        return SvResult::INTERNAL_ERROR;
    }
    mStream = stream;

    mSequenceId = 0;
    startEvs();

    // TODO(b/158131080): the STREAM_STARTED event is not implemented in the
    // EVS reference implementation yet. Once implemented, this logic should
    // be moved to the EVS notify callback.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STARTED";
    mStream->notify(SvEvent::STREAM_STARTED);
    mProcessingEvsFrames = false;

    // Start the frame generation thread
    mStreamState = RUNNING;

    mProcessThread = thread([this]() {
        processFrames();
    });

    return SvResult::OK;
}

Return<void> SurroundView2dSession::stopStream() {
    LOG(DEBUG) << __FUNCTION__;
    unique_lock<mutex> lock(mAccessLock);

    if (mStreamState == RUNNING) {
        // Tell the processFrames loop to stop processing frames
        mStreamState = STOPPING;

        // Stop the EVS stream asynchronously
        mCamera->stopVideoStream();
        mFramesHandler = nullptr;
    }

    return {};
}

Return<void> SurroundView2dSession::doneWithFrames(
    const SvFramesDesc& svFramesDesc) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mFramesRecord.inUse = false;

    (void)svFramesDesc;
    return {};
}

// Methods from ISurroundView2dSession follow.
Return<void> SurroundView2dSession::get2dMappingInfo(
    get2dMappingInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mInfo);
    return {};
}

Return<SvResult> SurroundView2dSession::set2dConfig(
    const Sv2dConfig& sv2dConfig) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (sv2dConfig.width <= 0 || sv2dConfig.width > 4096) {
        LOG(WARNING) << "The width of the 2d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    mConfig.width = sv2dConfig.width;
    mConfig.blending = sv2dConfig.blending;
    // Keep the output aspect ratio consistent with the 2d mapping info.
    mHeight = mConfig.width * mInfo.height / mInfo.width;

    if (mStream != nullptr) {
        LOG(DEBUG) << "Notify SvEvent::CONFIG_UPDATED";
        mStream->notify(SvEvent::CONFIG_UPDATED);
    }

    return SvResult::OK;
}

Return<void> SurroundView2dSession::get2dConfig(get2dConfig_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mConfig);
    return {};
}

Return<void> SurroundView2dSession::projectCameraPoints(
    const hidl_vec<Point2dInt>& points2dCamera,
    const hidl_string& cameraId,
    projectCameraPoints_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    bool cameraIdFound = false;
    for (auto& evsCameraId : mEvsCameraIds) {
        if (cameraId == evsCameraId) {
            cameraIdFound = true;
            LOG(INFO) << "Camera id found.";
            break;
        }
    }

    if (!cameraIdFound) {
        LOG(ERROR) << "Camera id not found.";
        _hidl_cb({});
        return {};
    }

    hidl_vec<Point2dFloat> outPoints;
    outPoints.resize(points2dCamera.size());

    int width = mConfig.width;
    int height = mHeight;
    for (int i = 0; i < points2dCamera.size(); i++) {
        // Points inside the image frame are assumed to be projectable into
        // the 2d Surround View space; points outside the frame are marked
        // invalid. Note that the actual projection is not implemented here
        // yet, so valid points are returned with placeholder (0, 0)
        // coordinates.
        if (points2dCamera[i].x < 0 || points2dCamera[i].x > width - 1 ||
            points2dCamera[i].y < 0 || points2dCamera[i].y > height - 1) {
            LOG(WARNING) << __FUNCTION__
                         << ": gets invalid 2d camera points. Ignored";
            outPoints[i].isValid = false;
            outPoints[i].x = 10000;
            outPoints[i].y = 10000;
        } else {
            outPoints[i].isValid = true;
            outPoints[i].x = 0;
            outPoints[i].y = 0;
        }
    }

    _hidl_cb(outPoints);
    return {};
}

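// Generates one 2d surround view frame from the latest set of EVS input
// frames: drops the frame if the client still holds the previous output,
// re-allocates the output buffers when the configuration has changed, runs
// the core-lib 2d stitching, copies the result into the output GraphicBuffer
// line by line, and delivers it to the client via receiveFrames().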
bool SurroundView2dSession::handleFrames(int sequenceId) {
    LOG(INFO) << __FUNCTION__ << ": Handling sequenceId " << sequenceId << ".";

    // TODO(b/157498592): Currently only one set of EVS input frames and one
    // SV output frame are supported. Implement a buffer queue for both.
    {
        scoped_lock<mutex> lock(mAccessLock);

        if (mFramesRecord.inUse) {
            LOG(DEBUG) << "Notify SvEvent::FRAME_DROPPED";
            mStream->notify(SvEvent::FRAME_DROPPED);
            return true;
        }
    }

    if (mOutputWidth != mConfig.width || mOutputHeight != mHeight) {
        LOG(DEBUG) << "Config changed. Re-allocate memory."
                   << " Old width: "
                   << mOutputWidth
                   << " Old height: "
                   << mOutputHeight
                   << " New width: "
                   << mConfig.width
                   << " New height: "
                   << mHeight;
        delete[] static_cast<char*>(mOutputPointer.data_pointer);
        mOutputWidth = mConfig.width;
        mOutputHeight = mHeight;
        mOutputPointer.height = mOutputHeight;
        mOutputPointer.width = mOutputWidth;
        mOutputPointer.format = Format::RGB;
        mOutputPointer.data_pointer =
            new char[mOutputHeight * mOutputWidth * kNumChannels];

        if (!mOutputPointer.data_pointer) {
            LOG(ERROR) << "Memory allocation failed. Exiting.";
            return false;
        }

        Size2dInteger size = Size2dInteger(mOutputWidth, mOutputHeight);
        mSurroundView->Update2dOutputResolution(size);

        mSvTexture = new GraphicBuffer(mOutputWidth,
                                       mOutputHeight,
                                       HAL_PIXEL_FORMAT_RGB_888,
                                       1,
                                       GRALLOC_USAGE_HW_TEXTURE,
                                       "SvTexture");
        if (mSvTexture->initCheck() == OK) {
            LOG(INFO) << "Successfully allocated Graphic Buffer";
        } else {
            LOG(ERROR) << "Failed to allocate Graphic Buffer";
            return false;
        }
    }

    if (mSurroundView->Get2dSurroundView(mInputPointers, &mOutputPointer)) {
        LOG(INFO) << "Get2dSurroundView succeeded";
    } else {
        LOG(ERROR) << "Get2dSurroundView failed. "
                   << "Using memset to initialize to gray";
        memset(mOutputPointer.data_pointer, kGrayColor,
               mOutputHeight * mOutputWidth * kNumChannels);
    }

    void* textureDataPtr = nullptr;
    mSvTexture->lock(GRALLOC_USAGE_SW_WRITE_OFTEN
                     | GRALLOC_USAGE_SW_READ_NEVER,
                     &textureDataPtr);
    if (!textureDataPtr) {
        LOG(ERROR) << "Failed to gain write access to GraphicBuffer!";
        return false;
    }

    // Note: there is a chance that the stride of the texture is not the same
    // as the width. For example, when the output frame is 1920 * 1080, the
    // width is 1920, but the stride may be rounded up to 2048. So the data is
    // copied line by line instead of with a single memcpy.
    uint8_t* writePtr = static_cast<uint8_t*>(textureDataPtr);
    uint8_t* readPtr = static_cast<uint8_t*>(mOutputPointer.data_pointer);
    const int readStride = mOutputWidth * kNumChannels;
    const int writeStride = mSvTexture->getStride() * kNumChannels;
    if (readStride == writeStride) {
        memcpy(writePtr, readPtr, readStride * mSvTexture->getHeight());
    } else {
        for (int i = 0; i < mSvTexture->getHeight(); i++) {
            memcpy(writePtr, readPtr, readStride);
            writePtr = writePtr + writeStride;
            readPtr = readPtr + readStride;
        }
    }
    LOG(DEBUG) << "memcpy finished";
    mSvTexture->unlock();

    ANativeWindowBuffer* buffer = mSvTexture->getNativeBuffer();
    LOG(DEBUG) << "ANativeWindowBuffer->handle: "
               << buffer->handle;

    {
        scoped_lock<mutex> lock(mAccessLock);

        mFramesRecord.frames.svBuffers.resize(1);
        SvBuffer& svBuffer = mFramesRecord.frames.svBuffers[0];
        svBuffer.viewId = kSv2dViewId;
        svBuffer.hardwareBuffer.nativeHandle = buffer->handle;
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc*>(
                &svBuffer.hardwareBuffer.description);
        pDesc->width = mOutputWidth;
        pDesc->height = mOutputHeight;
        pDesc->layers = 1;
        pDesc->usage = GRALLOC_USAGE_HW_TEXTURE;
        pDesc->stride = mSvTexture->getStride();
        pDesc->format = HAL_PIXEL_FORMAT_RGB_888;
        mFramesRecord.frames.timestampNs = elapsedRealtimeNano();
        mFramesRecord.frames.sequenceId = sequenceId;

        mFramesRecord.inUse = true;
        mStream->receiveFrames(mFramesRecord.frames);
    }

    return true;
}

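// One-time setup for the session: creates the core-lib SurroundView instance,
// feeds it the static configuration data, allocates the CPU-side input and
// output buffers and the output GraphicBuffer, starts the 2d pipeline, and
// sets up the EVS camera via setupEvs().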
bool SurroundView2dSession::initialize() {
    // The caller (startStream) already holds mAccessLock, so do not lock (or
    // adopt) the mutex here; adopting it would unlock it twice on return.

    // TODO(b/150412555): ask core-lib team to add API description for "create"
    // method in the .h file.
    // The create method will never return a null pointer based on the API
    // description.
    mSurroundView = unique_ptr<SurroundView>(Create());

    SurroundViewStaticDataParams params =
        SurroundViewStaticDataParams(GetCameras(),
                                     Get2dParams(),
                                     Get3dParams(),
                                     GetUndistortionScales(),
                                     GetBoundingBox(),
                                     map<string, CarTexture>(),
                                     map<string, CarPart>());
    mSurroundView->SetStaticData(params);

    mInputPointers.resize(4);
    // TODO(b/157498737): the following parameters should be fed from config
    // files. Remove the hard-coded values once the I/O module is ready.
    for (int i = 0; i < 4; i++) {
        mInputPointers[i].width = 1920;
        mInputPointers[i].height = 1024;
        mInputPointers[i].format = Format::RGB;
        mInputPointers[i].cpu_data_pointer =
            (void*) new uint8_t[mInputPointers[i].width *
                                mInputPointers[i].height *
                                kNumChannels];
    }
    LOG(INFO) << "Allocated 4 input pointers";

    mOutputWidth = Get2dParams().resolution.width;
    mOutputHeight = Get2dParams().resolution.height;

    mConfig.width = mOutputWidth;
    mConfig.blending = SvQuality::HIGH;
    mHeight = mOutputHeight;

    mOutputPointer.height = mOutputHeight;
    mOutputPointer.width = mOutputWidth;
    mOutputPointer.format = mInputPointers[0].format;
    mOutputPointer.data_pointer = new char[
        mOutputHeight * mOutputWidth * kNumChannels];

    if (!mOutputPointer.data_pointer) {
        LOG(ERROR) << "Memory allocation failed. Exiting.";
        return false;
    }

    mSvTexture = new GraphicBuffer(mOutputWidth,
                                   mOutputHeight,
                                   HAL_PIXEL_FORMAT_RGB_888,
                                   1,
                                   GRALLOC_USAGE_HW_TEXTURE,
                                   "SvTexture");

    // TODO(b/150412555): the 2d mapping info should be read from a config file.
    mInfo.width = 8;
    mInfo.height = 6;
    mInfo.center.isValid = true;
    mInfo.center.x = 0;
    mInfo.center.y = 0;

    if (mSvTexture->initCheck() == OK) {
        LOG(INFO) << "Successfully allocated Graphic Buffer";
    } else {
        LOG(ERROR) << "Failed to allocate Graphic Buffer";
        return false;
    }

    if (mSurroundView->Start2dPipeline()) {
        LOG(INFO) << "Start2dPipeline succeeded";
    } else {
        LOG(ERROR) << "Start2dPipeline failed";
        return false;
    }

    if (!setupEvs()) {
        LOG(ERROR) << "Failed to setup EVS components for 2d session";
        return false;
    }

    mIsInitialized = true;
    return true;
}

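// Opens the EVS camera used by the session: looks up the logical camera
// "group0", picks the largest RGBA8888 output stream configuration advertised
// in its metadata, opens the camera with that configuration, and collects the
// per-physical-camera parameters via getAndroidCameraParams().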
bool SurroundView2dSession::setupEvs() {
    // Setup for EVS
    // TODO(b/157498737): We are using the hard-coded camera "group0" here. It
    // should be read from a configuration file once the I/O module is ready.
    LOG(INFO) << "Requesting camera list";
    mEvs->getCameraList_1_1([this] (hidl_vec<CameraDesc> cameraList) {
        LOG(INFO) << "Camera list callback received " << cameraList.size();
        for (auto&& cam : cameraList) {
            LOG(INFO) << "Found camera " << cam.v1.cameraId;
            if (cam.v1.cameraId == "group0") {
                mCameraDesc = cam;
            }
        }
    });

    bool foundCfg = false;
    std::unique_ptr<Stream> targetCfg(new Stream());

    // This logic picks the configuration with the largest area that supports
    // the RGBA8888 format.
    int32_t maxArea = 0;
    camera_metadata_entry_t streamCfgs;
    if (!find_camera_metadata_entry(
            reinterpret_cast<camera_metadata_t *>(mCameraDesc.metadata.data()),
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            &streamCfgs)) {
        // Stream configurations are found in the metadata.
        RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(
            streamCfgs.data.i32);
        for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
            if (ptr->direction ==
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {

                if (ptr->width * ptr->height > maxArea) {
                    targetCfg->id = ptr->id;
                    targetCfg->width = ptr->width;
                    targetCfg->height = ptr->height;

                    // This client always wants the RGBA8888 input data format.
                    targetCfg->format =
                        static_cast<GraphicsPixelFormat>(
                            HAL_PIXEL_FORMAT_RGBA_8888);

                    maxArea = ptr->width * ptr->height;

                    foundCfg = true;
                }
            }
            ++ptr;
        }
    } else {
        LOG(WARNING) << "No stream configuration data is found; "
                     << "default parameters will be used.";
    }

    if (!foundCfg) {
        LOG(INFO) << "No config was found";
        targetCfg = nullptr;
        return false;
    }

    string camId = mCameraDesc.v1.cameraId.c_str();
    mCamera = mEvs->openCamera_1_1(camId.c_str(), *targetCfg);
    if (mCamera == nullptr) {
        LOG(ERROR) << "Failed to allocate EVS Camera interface for " << camId;
        return false;
    } else {
        LOG(INFO) << "Camera " << camId << " is opened successfully";
    }

    // TODO(b/156101189): camera position information is needed from the
    // I/O module.
    vector<string> cameraIds = getPhysicalCameraIds(mCamera);
    map<string, AndroidCameraParams> cameraIdToAndroidParameters;

    for (auto& id : cameraIds) {
        AndroidCameraParams params;
        if (getAndroidCameraParams(mCamera, id, params)) {
            cameraIdToAndroidParameters.emplace(id, params);
            LOG(INFO) << "Camera parameters are fetched successfully for "
                      << "physical camera: " << id;
        } else {
            LOG(ERROR) << "Failed to get camera parameters for "
                       << "physical camera: " << id;
            return false;
        }
    }

    return true;
}

bool SurroundView2dSession::startEvs() {
    mFramesHandler = new FramesHandler(mCamera, this);
    Return<EvsResult> result = mCamera->startVideoStream(mFramesHandler);
    if (result != EvsResult::OK) {
        LOG(ERROR) << "Failed to start video stream";
        return false;
    } else {
        LOG(INFO) << "Video stream was started successfully";
    }

    return true;
}

}  // namespace implementation
}  // namespace V1_0
}  // namespace sv
}  // namespace automotive
}  // namespace hardware
}  // namespace android