/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#define LOG_TAG "SurroundViewService"

#include "SurroundView3dSession.h"

#include <android-base/logging.h>
#include <android/hardware_buffer.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>
#include <system/camera_metadata.h>
#include <utils/SystemClock.h>

#include <array>
#include <thread>
#include <set>

#include <android/hardware/camera/device/3.2/ICameraDevice.h>

#include "sv_3d_params.h"

using ::android::hardware::automotive::evs::V1_0::EvsResult;
using ::android::hardware::camera::device::V3_2::Stream;
using ::android::hardware::hidl_memory;
using ::android::hidl::memory::V1_0::IMemory;

using GraphicsPixelFormat = ::android::hardware::graphics::common::V1_0::PixelFormat;

namespace android {
namespace hardware {
namespace automotive {
namespace sv {
namespace V1_0 {
namespace implementation {

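// Raw layout of a single stream configuration entry in the
// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS camera metadata field;
// setupEvs() below reinterprets the metadata's int32 array as entries of
// this shape when picking an output stream.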
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;

static const size_t kStreamCfgSz = sizeof(RawStreamConfig);
static const uint8_t kGrayColor = 128;
static const int kNumChannels = 4;

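// FramesHandler receives frames and events from the EVS camera. Delivered
// frames are returned to the camera right away; they are only used to bump
// the sequence id and to signal the session's processing thread that a new
// set of frames is available.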
SurroundView3dSession::FramesHandler::FramesHandler(
    sp<IEvsCamera> pCamera, sp<SurroundView3dSession> pSession)
    : mCamera(pCamera),
      mSession(pSession) {}

Return<void> SurroundView3dSession::FramesHandler::deliverFrame(
    const BufferDesc_1_0& bufDesc_1_0) {
    LOG(INFO) << "Ignoring a frame delivered from the v1.0 EVS service.";
    mCamera->doneWithFrame(bufDesc_1_0);

    return Void();
}

Return<void> SurroundView3dSession::FramesHandler::deliverFrame_1_1(
    const hidl_vec<BufferDesc_1_1>& buffers) {
    LOG(INFO) << "Received " << buffers.size() << " frames from the camera";
    mSession->sequenceId++;

    // TODO(b/157498592): Use EVS frames for SV stitching.
    mCamera->doneWithFrame_1_1(buffers);

    // Notify the session that a new set of frames is ready
    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        mSession->mFramesAvailable = true;
    }
    mSession->mFramesSignal.notify_all();

    return Void();
}

Return<void> SurroundView3dSession::FramesHandler::notify(const EvsEventDesc& event) {
    switch (event.aType) {
        case EvsEventType::STREAM_STOPPED:
            LOG(INFO) << "Received a STREAM_STOPPED event from Evs.";

            // TODO(b/158339680): There is currently an issue in the EVS
            // reference implementation that prevents the STREAM_STOPPED event
            // from being delivered properly. Once the bug is fixed, this
            // event should be handled properly in case the EVS stream is
            // stopped unexpectedly.
            break;

        case EvsEventType::PARAMETER_CHANGED:
            LOG(INFO) << "Camera parameter " << std::hex << event.payload[0]
                      << " is set to " << event.payload[1];
            break;

        // The events below are ignored in the reference implementation.
        case EvsEventType::STREAM_STARTED:
        [[fallthrough]];
        case EvsEventType::FRAME_DROPPED:
        [[fallthrough]];
        case EvsEventType::TIMEOUT:
115 LOG(INFO) << "Event " << std::hex << static_cast<unsigned>(event.aType)
116 << "is received but ignored.";
            break;
        default:
            LOG(ERROR) << "Unknown event id: " << static_cast<unsigned>(event.aType);
            break;
    }

    return Void();
}

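// Main loop of the processing thread. It starts the core-lib 3d pipeline,
// then waits for FramesHandler to signal that a new set of frames is
// available and renders one surround view output per signal. The loop exits
// once stopStream() moves the stream state out of RUNNING, after which the
// client is notified with STREAM_STOPPED.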
void SurroundView3dSession::processFrames() {
    if (mSurroundView->Start3dPipeline()) {
        LOG(INFO) << "Start3dPipeline succeeded";
    } else {
        LOG(ERROR) << "Start3dPipeline failed";
        return;
    }

    while (true) {
        {
            unique_lock<mutex> lock(mAccessLock);

            if (mStreamState != RUNNING) {
                break;
            }

            mFramesSignal.wait(lock, [this]() {
                return mFramesAvailable;
            });
        }

        handleFrames(sequenceId);

        {
            // Set the boolean to false to receive the next set of frames.
            scoped_lock<mutex> lock(mAccessLock);
            mFramesAvailable = false;
        }
    }

    // Notify the SV client that no new results will be delivered.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STOPPED";
    mStream->notify(SvEvent::STREAM_STOPPED);

    {
        scoped_lock<mutex> lock(mAccessLock);
        mStreamState = STOPPED;
        mStream = nullptr;
        LOG(DEBUG) << "Stream marked STOPPED.";
    }
}

SurroundView3dSession::SurroundView3dSession(sp<IEvsEnumerator> pEvs)
    : mEvs(pEvs),
      mStreamState(STOPPED) {
    mEvsCameraIds = {"0", "1", "2", "3"};
}

SurroundView3dSession::~SurroundView3dSession() {
    // In case the client did not call stopStream properly, we should stop the
    // stream explicitly. Otherwise the process thread will take forever to
    // join.
    stopStream();

    // Waiting for the process thread to finish the buffered frames. Only join
    // if the thread was actually started.
    if (mProcessThread.joinable()) {
        mProcessThread.join();
    }

    mEvs->closeCamera(mCamera);
}

// Methods from ::android::hardware::automotive::sv::V1_0::ISurroundViewSession.
Return<SvResult> SurroundView3dSession::startStream(
    const sp<ISurroundViewStream>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (!mIsInitialized && !initialize()) {
        LOG(ERROR) << "There was an error while initializing the use case. "
                   << "Exiting.";
        return SvResult::INTERNAL_ERROR;
    }

    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startStream call when a stream is "
                   << "already running.";
        return SvResult::INTERNAL_ERROR;
    }

    if (mViews.empty()) {
205 LOG(ERROR) << "No views have been set for current Surround View"
206 << "3d Session. Please call setViews before starting"
207 << "the stream.";
        return SvResult::VIEW_NOT_SET;
    }

    if (stream == nullptr) {
        LOG(ERROR) << "The input stream is invalid";
        return SvResult::INTERNAL_ERROR;
    }
    mStream = stream;

    sequenceId = 0;
    startEvs();

    // TODO(b/158131080): the STREAM_STARTED event is not implemented in EVS
    // reference implementation yet. Once implemented, this logic should be
    // moved to EVS notify callback.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STARTED";
    mStream->notify(SvEvent::STREAM_STARTED);

    // Start the frame generation thread
    mStreamState = RUNNING;

    mProcessThread = thread([this]() {
        processFrames();
    });

    return SvResult::OK;
}

Return<void> SurroundView3dSession::stopStream() {
    LOG(DEBUG) << __FUNCTION__;
    unique_lock<mutex> lock(mAccessLock);

    if (mStreamState == RUNNING) {
        // Tell the processFrames loop to stop processing frames
        mStreamState = STOPPING;

        // Stop the EVS stream asynchronously
        mCamera->stopVideoStream();
    }

    return {};
}

Return<void> SurroundView3dSession::doneWithFrames(
    const SvFramesDesc& svFramesDesc) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    framesRecord.inUse = false;

    (void)svFramesDesc;
    return {};
}

// Methods from ISurroundView3dSession follow.
Return<SvResult> SurroundView3dSession::setViews(
    const hidl_vec<View3d>& views) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mViews.resize(views.size());
    for (int i = 0; i < views.size(); i++) {
        mViews[i] = views[i];
    }

    return SvResult::OK;
}

Return<SvResult> SurroundView3dSession::set3dConfig(const Sv3dConfig& sv3dConfig) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (sv3dConfig.width <= 0 || sv3dConfig.width > 4096) {
        LOG(WARNING) << "The width of the 3d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    if (sv3dConfig.height <= 0 || sv3dConfig.height > 4096) {
        LOG(WARNING) << "The height of the 3d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    mConfig.width = sv3dConfig.width;
    mConfig.height = sv3dConfig.height;
    mConfig.carDetails = sv3dConfig.carDetails;

    if (mStream != nullptr) {
        LOG(DEBUG) << "Notify SvEvent::CONFIG_UPDATED";
        mStream->notify(SvEvent::CONFIG_UPDATED);
    }

    return SvResult::OK;
}

Return<void> SurroundView3dSession::get3dConfig(get3dConfig_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mConfig);
    return {};
}

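// Sanity-checks the overlay data passed in by the client: the shared memory
// size must match the descriptors, overlay ids must be unique and must match
// the ids embedded in the shared memory, and each overlay needs at least 3
// vertices (a multiple of 3 for TRIANGLES primitives).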
bool VerifyOverlayData(const OverlaysData& overlaysData) {
    // Check size of shared memory matches overlaysMemoryDesc.
    const int kVertexSize = 16;
    const int kIdSize = 2;
    int memDescSize = 0;
    for (auto& overlayMemDesc : overlaysData.overlaysMemoryDesc) {
        memDescSize += kIdSize + kVertexSize * overlayMemDesc.verticesCount;
    }
    if (memDescSize != overlaysData.overlaysMemory.size()) {
        LOG(ERROR) << "shared memory and overlaysMemoryDesc size mismatch.";
        return false;
    }

    // Map memory.
    sp<IMemory> pSharedMemory = mapMemory(overlaysData.overlaysMemory);
    if (pSharedMemory == nullptr) {
        LOG(ERROR) << "mapMemory failed.";
        return false;
    }

    // Get Data pointer.
    uint8_t* pData = static_cast<uint8_t*>(
        static_cast<void*>(pSharedMemory->getPointer()));
    if (pData == nullptr) {
        LOG(ERROR) << "Shared memory getPointer() failed.";
        return false;
    }

    int idOffset = 0;
    set<uint16_t> overlayIdSet;
    for (auto& overlayMemDesc : overlaysData.overlaysMemoryDesc) {
        if (overlayIdSet.find(overlayMemDesc.id) != overlayIdSet.end()) {
            LOG(ERROR) << "Duplicate id within memory descriptor.";
            return false;
        }
        overlayIdSet.insert(overlayMemDesc.id);

        if (overlayMemDesc.verticesCount < 3) {
            LOG(ERROR) << "Less than 3 vertices.";
            return false;
        }

        if (overlayMemDesc.overlayPrimitive == OverlayPrimitive::TRIANGLES &&
                overlayMemDesc.verticesCount % 3 != 0) {
356 LOG(ERROR) << "Triangles primitive does not have vertices "
357 << "multiple of 3.";
            return false;
        }

        const uint16_t overlayId = *((uint16_t*)(pData + idOffset));

        if (overlayId != overlayMemDesc.id) {
            LOG(ERROR) << "Overlay id mismatch "
                       << overlayId
                       << ", "
                       << overlayMemDesc.id;
            return false;
        }

        idOffset += kIdSize + (kVertexSize * overlayMemDesc.verticesCount);
    }

    return true;
}

// TODO(b/150412555): the overlay related methods are incomplete.
Return<SvResult> SurroundView3dSession::updateOverlays(
    const OverlaysData& overlaysData) {

    if (!VerifyOverlayData(overlaysData)) {
        LOG(ERROR) << "VerifyOverlayData failed.";
        return SvResult::INVALID_ARG;
    }

    return SvResult::OK;
}

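// Note: the current implementation only checks that the camera id is known
// and bounds-checks each input point against the output configuration; the
// returned Point3dFloat values are not yet projected onto the 3d surface.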
Return<void> SurroundView3dSession::projectCameraPointsTo3dSurface(
    const hidl_vec<Point2dInt>& cameraPoints,
    const hidl_string& cameraId,
    projectCameraPointsTo3dSurface_cb _hidl_cb) {

    vector<Point3dFloat> points3d;
    bool cameraIdFound = false;
    for (auto& evsCameraId : mEvsCameraIds) {
        if (cameraId == evsCameraId) {
            cameraIdFound = true;
            LOG(INFO) << "Camera id found.";
            break;
        }
    }

    if (!cameraIdFound) {
        LOG(ERROR) << "Camera id not found.";
        _hidl_cb(points3d);
        return {};
    }

    for (const auto& cameraPoint : cameraPoints) {
        Point3dFloat point3d;
        point3d.isValid = (cameraPoint.x >= 0
                           && cameraPoint.x < mConfig.width
                           && cameraPoint.y >= 0
                           && cameraPoint.y < mConfig.height);
        if (!point3d.isValid) {
            LOG(WARNING) << "Camera point out of bounds.";
        }
        points3d.push_back(point3d);
    }
    _hidl_cb(points3d);
    return {};
}

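// Renders one 3d surround view frame: re-allocates the output buffer and the
// GraphicBuffer texture if the configuration changed, runs Get3dSurroundView()
// with a hard-coded virtual view selected by the sequence id, copies the
// result into the texture row by row, and delivers it to the client unless
// the previous frame is still in use.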
bool SurroundView3dSession::handleFrames(int sequenceId) {
    LOG(INFO) << __FUNCTION__ << ": Handling sequenceId " << sequenceId << ".";

    // If the width/height was changed, re-allocate the data pointer.
    if (mOutputWidth != mConfig.width
        || mOutputHeight != mConfig.height) {
        LOG(DEBUG) << "Config changed. Re-allocate memory. "
                   << "Old width: "
                   << mOutputWidth
                   << ", old height: "
                   << mOutputHeight
                   << "; New width: "
                   << mConfig.width
                   << ", new height: "
                   << mConfig.height;
        delete[] static_cast<char*>(mOutputPointer.data_pointer);
        mOutputWidth = mConfig.width;
        mOutputHeight = mConfig.height;
        mOutputPointer.height = mOutputHeight;
        mOutputPointer.width = mOutputWidth;
        mOutputPointer.format = Format::RGBA;
        mOutputPointer.data_pointer =
            new char[mOutputHeight * mOutputWidth * kNumChannels];

        if (!mOutputPointer.data_pointer) {
            LOG(ERROR) << "Memory allocation failed. Exiting.";
            return false;
        }

        Size2dInteger size = Size2dInteger(mOutputWidth, mOutputHeight);
        mSurroundView->Update3dOutputResolution(size);

        mSvTexture = new GraphicBuffer(mOutputWidth,
                                       mOutputHeight,
                                       HAL_PIXEL_FORMAT_RGBA_8888,
                                       1,
                                       GRALLOC_USAGE_HW_TEXTURE,
                                       "SvTexture");
        if (mSvTexture->initCheck() == OK) {
            LOG(INFO) << "Successfully allocated Graphic Buffer";
        } else {
            LOG(ERROR) << "Failed to allocate Graphic Buffer";
            return false;
        }
    }

    // TODO(b/150412555): do not use the setViews for frames generation
    // since there is a discrepancy between the HIDL APIs and core lib APIs.
    array<array<float, 4>, 4> matrix;

    // TODO(b/150412555): use hard-coded views for now. Change view every
    // frame.
    int recViewId = sequenceId % 16;
    for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 4; j++) {
            matrix[i][j] = kRecViews[recViewId][i * 4 + j];
        }
    }

    if (mSurroundView->Get3dSurroundView(
            mInputPointers, matrix, &mOutputPointer)) {
        LOG(INFO) << "Get3dSurroundView succeeded";
    } else {
        LOG(ERROR) << "Get3dSurroundView failed. "
                   << "Using memset to initialize to gray.";
        memset(mOutputPointer.data_pointer, kGrayColor,
               mOutputHeight * mOutputWidth * kNumChannels);
    }

    void* textureDataPtr = nullptr;
    mSvTexture->lock(GRALLOC_USAGE_SW_WRITE_OFTEN
                     | GRALLOC_USAGE_SW_READ_NEVER,
                     &textureDataPtr);
    if (!textureDataPtr) {
        LOG(ERROR) << "Failed to gain write access to GraphicBuffer!";
        return false;
    }

    // Note: there is a chance that the stride of the texture is not the
    // same as the width. For example, a 1920 x 1080 texture may have its
    // stride padded to 2048 pixels. So we copy the data line by line
    // instead of using a single memcpy.
    uint8_t* writePtr = static_cast<uint8_t*>(textureDataPtr);
    uint8_t* readPtr = static_cast<uint8_t*>(mOutputPointer.data_pointer);
    const int readStride = mOutputWidth * kNumChannels;
    const int writeStride = mSvTexture->getStride() * kNumChannels;
    if (readStride == writeStride) {
        memcpy(writePtr, readPtr, readStride * mSvTexture->getHeight());
    } else {
        for (int i = 0; i < mSvTexture->getHeight(); i++) {
            memcpy(writePtr, readPtr, readStride);
            writePtr = writePtr + writeStride;
            readPtr = readPtr + readStride;
        }
    }
    LOG(INFO) << "memcpy finished!";
    mSvTexture->unlock();

    ANativeWindowBuffer* buffer = mSvTexture->getNativeBuffer();
    LOG(DEBUG) << "ANativeWindowBuffer->handle: " << buffer->handle;

    framesRecord.frames.svBuffers.resize(1);
    SvBuffer& svBuffer = framesRecord.frames.svBuffers[0];
    svBuffer.viewId = 0;
    svBuffer.hardwareBuffer.nativeHandle = buffer->handle;
    AHardwareBuffer_Desc* pDesc =
        reinterpret_cast<AHardwareBuffer_Desc *>(
            &svBuffer.hardwareBuffer.description);
    pDesc->width = mOutputWidth;
    pDesc->height = mOutputHeight;
    pDesc->layers = 1;
    pDesc->usage = GRALLOC_USAGE_HW_TEXTURE;
    pDesc->stride = mSvTexture->getStride();
    pDesc->format = HAL_PIXEL_FORMAT_RGBA_8888;
    framesRecord.frames.timestampNs = elapsedRealtimeNano();
    framesRecord.frames.sequenceId = sequenceId;

    {
        scoped_lock<mutex> lock(mAccessLock);

        if (framesRecord.inUse) {
            LOG(DEBUG) << "Notify SvEvent::FRAME_DROPPED";
            mStream->notify(SvEvent::FRAME_DROPPED);
        } else {
            framesRecord.inUse = true;
            mStream->receiveFrames(framesRecord.frames);
        }
    }

    return true;
}

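// One-time setup for the session: creates the core-lib SurroundView instance,
// feeds it the static calibration data, loads placeholder input images (until
// EVS frames are wired in), allocates the output buffer and texture, and sets
// up the EVS camera.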
bool SurroundView3dSession::initialize() {
    lock_guard<mutex> lock(mAccessLock, adopt_lock);

    // TODO(b/150412555): ask core-lib team to add API description for "create"
    // method in the .h file.
    // The create method will never return a null pointer based on the API
    // description.
    mSurroundView = unique_ptr<SurroundView>(Create());

    SurroundViewStaticDataParams params =
        SurroundViewStaticDataParams(GetCameras(),
                                     Get2dParams(),
                                     Get3dParams(),
                                     GetUndistortionScales(),
                                     GetBoundingBox(),
                                     map<string, CarTexture>(),
                                     map<string, CarPart>());
    mSurroundView->SetStaticData(params);

    // TODO(b/150412555): remove after EVS camera is used
    mInputPointers = mSurroundView->ReadImages(
        "/etc/automotive/sv/cam0.png",
        "/etc/automotive/sv/cam1.png",
        "/etc/automotive/sv/cam2.png",
        "/etc/automotive/sv/cam3.png");
    if (mInputPointers.size() == 4
            && mInputPointers[0].cpu_data_pointer != nullptr) {
        LOG(INFO) << "ReadImages succeeded";
    } else {
        LOG(ERROR) << "Failed to read images";
        return false;
    }

    mOutputWidth = Get3dParams().resolution.width;
    mOutputHeight = Get3dParams().resolution.height;

    mConfig.width = mOutputWidth;
    mConfig.height = mOutputHeight;
    mConfig.carDetails = SvQuality::HIGH;

    mOutputPointer.height = mOutputHeight;
    mOutputPointer.width = mOutputWidth;
    mOutputPointer.format = Format::RGBA;
    mOutputPointer.data_pointer = new char[
        mOutputHeight * mOutputWidth * kNumChannels];

    if (!mOutputPointer.data_pointer) {
        LOG(ERROR) << "Memory allocation failed. Exiting.";
        return false;
    }

    mSvTexture = new GraphicBuffer(mOutputWidth,
                                   mOutputHeight,
                                   HAL_PIXEL_FORMAT_RGBA_8888,
                                   1,
                                   GRALLOC_USAGE_HW_TEXTURE,
                                   "SvTexture");

    if (mSvTexture->initCheck() == OK) {
        LOG(INFO) << "Successfully allocated Graphic Buffer";
    } else {
        LOG(ERROR) << "Failed to allocate Graphic Buffer";
        return false;
    }

    if (!setupEvs()) {
        LOG(ERROR) << "Failed to setup EVS components for 3d session";
        return false;
    }

    mIsInitialized = true;
    return true;
}

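// Opens the EVS camera used by this session. The hard-coded "group0" camera
// group is looked up from the enumerator, and the output stream configuration
// with the largest area that supports RGBA8888 is selected from its camera
// metadata.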
bool SurroundView3dSession::setupEvs() {
    // Setup for EVS
    // TODO(b/157498737): We are using hard-coded camera "group0" here. It
    // should be read from configuration file once I/O module is ready.
    LOG(INFO) << "Requesting camera list";
    mEvs->getCameraList_1_1([this] (hidl_vec<CameraDesc> cameraList) {
        LOG(INFO) << "Camera list callback received " << cameraList.size();
        for (auto&& cam : cameraList) {
            LOG(INFO) << "Found camera " << cam.v1.cameraId;
            if (cam.v1.cameraId == "group0") {
                mCameraDesc = cam;
            }
        }
    });

    bool foundCfg = false;
    std::unique_ptr<Stream> targetCfg(new Stream());

    // This logic picks the configuration with the largest area that supports
    // RGBA8888 format
    int32_t maxArea = 0;
    camera_metadata_entry_t streamCfgs;
    if (!find_camera_metadata_entry(
            reinterpret_cast<camera_metadata_t *>(mCameraDesc.metadata.data()),
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            &streamCfgs)) {
        // Stream configurations are found in metadata
        RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(
            streamCfgs.data.i32);
        for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
            if (ptr->direction ==
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {

                if (ptr->width * ptr->height > maxArea) {
                    targetCfg->id = ptr->id;
                    targetCfg->width = ptr->width;
                    targetCfg->height = ptr->height;

                    // This client always wants the input data in the format below
                    targetCfg->format =
                        static_cast<GraphicsPixelFormat>(
                            HAL_PIXEL_FORMAT_RGBA_8888);

                    maxArea = ptr->width * ptr->height;

                    foundCfg = true;
                }
            }
            ++ptr;
        }
    } else {
        LOG(WARNING) << "No stream configuration data is found; "
                     << "default parameters will be used.";
    }

    if (!foundCfg) {
        LOG(INFO) << "No config was found";
        targetCfg = nullptr;
        return false;
    }

    string camId = mCameraDesc.v1.cameraId.c_str();
    mCamera = mEvs->openCamera_1_1(camId.c_str(), *targetCfg);
    if (mCamera == nullptr) {
        LOG(ERROR) << "Failed to allocate EVS Camera interface for " << camId;
        return false;
    } else {
        LOG(INFO) << "Camera " << camId << " is opened successfully";
    }

    return true;
}

bool SurroundView3dSession::startEvs() {
    mFramesHandler = new FramesHandler(mCamera, this);
    Return<EvsResult> result = mCamera->startVideoStream(mFramesHandler);
    if (result != EvsResult::OK) {
        LOG(ERROR) << "Failed to start video stream";
        return false;
    } else {
        LOG(INFO) << "Video stream was started successfully";
    }

    return true;
}

}  // namespace implementation
}  // namespace V1_0
}  // namespace sv
}  // namespace automotive
}  // namespace hardware
}  // namespace android