/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "SurroundView3dSession.h"

#include <android-base/logging.h>
#include <android/hardware_buffer.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>
#include <system/camera_metadata.h>
#include <utils/SystemClock.h>

#include <array>
#include <set>
#include <thread>

#include <android/hardware/camera/device/3.2/ICameraDevice.h>

#include "CameraUtils.h"
#include "sv_3d_params.h"

using ::android::hardware::automotive::evs::V1_0::EvsResult;
using ::android::hardware::camera::device::V3_2::Stream;
using ::android::hardware::hidl_memory;
using ::android::hidl::memory::V1_0::IMemory;

using GraphicsPixelFormat = ::android::hardware::graphics::common::V1_0::PixelFormat;

namespace android {
namespace hardware {
namespace automotive {
namespace sv {
namespace V1_0 {
namespace implementation {

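// One stream configuration record as reported through
// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS in the EVS camera metadata:
// six consecutive int32_t values per record. (This mirrors the packing used by
// the EVS reference stack; treat the exact layout as an assumption for other
// EVS HAL implementations.)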
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;

// Measured in int32_t elements to match camera_metadata_entry_t::count, so
// the scan loop in setupEvs() advances one full record per iteration.
static const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
static const uint8_t kGrayColor = 128;
static const int kNumFrames = 4;
static const int kNumChannels = 4;

SurroundView3dSession::FramesHandler::FramesHandler(
    sp<IEvsCamera> pCamera, sp<SurroundView3dSession> pSession)
    : mCamera(pCamera),
      mSession(pSession) {}

Return<void> SurroundView3dSession::FramesHandler::deliverFrame(
    const BufferDesc_1_0& bufDesc_1_0) {
    LOG(INFO) << "Ignoring a frame delivered from the v1.0 EVS service.";
    mCamera->doneWithFrame(bufDesc_1_0);

    return {};
}

Return<void> SurroundView3dSession::FramesHandler::deliverFrame_1_1(
    const hidl_vec<BufferDesc_1_1>& buffers) {
    LOG(INFO) << "Received " << buffers.size() << " frames from the camera";
    mSession->mSequenceId++;

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        if (mSession->mProcessingEvsFrames) {
            LOG(WARNING) << "EVS frames are still being processed. Skipping frames: "
                         << mSession->mSequenceId;
            mCamera->doneWithFrame_1_1(buffers);
            return {};
        }
    }

    if (buffers.size() != kNumFrames) {
        LOG(ERROR) << "The number of incoming frames is " << buffers.size()
                   << ", which differs from the expected number " << kNumFrames
                   << " specified in the config file";
        return {};
    }

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        for (int i = 0; i < kNumFrames; i++) {
            LOG(DEBUG) << "Copying buffer No." << i
                       << " to Surround View Service";
            mSession->copyFromBufferToPointers(buffers[i],
                                               mSession->mInputPointers[i]);
        }
    }

    mCamera->doneWithFrame_1_1(buffers);

    // Notify the session that a new set of frames is ready
    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        mSession->mProcessingEvsFrames = true;
    }
    mSession->mFramesSignal.notify_all();

    return {};
}

Return<void> SurroundView3dSession::FramesHandler::notify(const EvsEventDesc& event) {
    switch (event.aType) {
        case EvsEventType::STREAM_STOPPED:
            LOG(INFO) << "Received a STREAM_STOPPED event from Evs.";

            // TODO(b/158339680): There is currently an issue in the EVS
            // reference implementation that prevents the STREAM_STOPPED event
            // from being delivered properly. Once the bug is fixed, this event
            // should be handled properly in case the EVS stream is stopped
            // unexpectedly.
            break;

        case EvsEventType::PARAMETER_CHANGED:
            LOG(INFO) << "Camera parameter " << std::hex << event.payload[0]
                      << " is set to " << event.payload[1];
            break;

        // Below events are ignored in reference implementation.
        case EvsEventType::STREAM_STARTED:
            [[fallthrough]];
        case EvsEventType::FRAME_DROPPED:
            [[fallthrough]];
        case EvsEventType::TIMEOUT:
            LOG(INFO) << "Event " << std::hex << static_cast<unsigned>(event.aType)
                      << " is received but ignored.";
            break;
        default:
            LOG(ERROR) << "Unknown event id: " << static_cast<unsigned>(event.aType);
            break;
    }

    return {};
}

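// Copies one EVS frame (RGBA) into the pre-allocated core-library input buffer
// (RGB). The GraphicBuffer wrapping and CPU lock below are needed because the
// core library consumes CPU-accessible pixel data.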
bool SurroundView3dSession::copyFromBufferToPointers(
    BufferDesc_1_1 buffer, SurroundViewInputBufferPointers pointers) {

    AHardwareBuffer_Desc* pDesc =
        reinterpret_cast<AHardwareBuffer_Desc *>(&buffer.buffer.description);

    // Create a GraphicBuffer from the existing handle
    sp<GraphicBuffer> inputBuffer = new GraphicBuffer(
        buffer.buffer.nativeHandle, GraphicBuffer::CLONE_HANDLE, pDesc->width,
        pDesc->height, pDesc->format, pDesc->layers,
        GRALLOC_USAGE_HW_TEXTURE, pDesc->stride);

    if (inputBuffer == nullptr) {
        LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap image handle";
        // Returning false because there is nothing to copy from; the caller
        // will simply skip this frame.
        return false;
    } else {
        LOG(INFO) << "Managed to allocate GraphicBuffer with"
                  << " width: " << pDesc->width
                  << " height: " << pDesc->height
                  << " format: " << pDesc->format
                  << " stride: " << pDesc->stride;
    }

    // Lock the input GraphicBuffer and map it to a pointer. If we failed to
    // lock, return false.
    void* inputDataPtr;
    inputBuffer->lock(
        GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
        &inputDataPtr);
    if (!inputDataPtr) {
        LOG(ERROR) << "Failed to gain read access to GraphicBuffer";
        inputBuffer->unlock();
        return false;
    } else {
        LOG(INFO) << "Managed to get read access to GraphicBuffer";
    }

    int stride = pDesc->stride;

    // readPtr comes from EVS and has 4 channels (RGBA).
    uint8_t* readPtr = static_cast<uint8_t*>(inputDataPtr);

    // writePtr has 3 channels (RGB), since that is what the SV core lib expects.
    uint8_t* writePtr = static_cast<uint8_t*>(pointers.cpu_data_pointer);

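    // Per-pixel copy that drops the alpha channel. The indices use
    // pDesc->stride (in pixels), so padded rows are handled correctly.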
    for (int i = 0; i < pDesc->width; i++) {
        for (int j = 0; j < pDesc->height; j++) {
            writePtr[(i + j * stride) * 3 + 0] =
                readPtr[(i + j * stride) * 4 + 0];
            writePtr[(i + j * stride) * 3 + 1] =
                readPtr[(i + j * stride) * 4 + 1];
            writePtr[(i + j * stride) * 3 + 2] =
                readPtr[(i + j * stride) * 4 + 2];
        }
    }
    LOG(INFO) << "Brute force copying finished";

    return true;
}

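// Worker loop for the processing thread started in startStream(): waits until
// FramesHandler signals that a new set of EVS frames has been copied, renders
// one surround view frame via handleFrames(), and exits once the stream state
// leaves RUNNING.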
void SurroundView3dSession::processFrames() {
    if (mSurroundView->Start3dPipeline()) {
        LOG(INFO) << "Start3dPipeline succeeded";
    } else {
        LOG(ERROR) << "Start3dPipeline failed";
        return;
    }

    while (true) {
        {
            unique_lock<mutex> lock(mAccessLock);

            if (mStreamState != RUNNING) {
                break;
            }

            mFramesSignal.wait(lock, [this]() { return mProcessingEvsFrames; });
        }

        handleFrames(mSequenceId);

        {
            // Set the boolean to false to receive the next set of frames.
            scoped_lock<mutex> lock(mAccessLock);
            mProcessingEvsFrames = false;
        }
    }

    // Notify the SV client that no new results will be delivered.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STOPPED";
    mStream->notify(SvEvent::STREAM_STOPPED);

    {
        scoped_lock<mutex> lock(mAccessLock);
        mStreamState = STOPPED;
        mStream = nullptr;
        LOG(DEBUG) << "Stream marked STOPPED.";
    }
}

SurroundView3dSession::SurroundView3dSession(sp<IEvsEnumerator> pEvs,
                                             VhalHandler* vhalHandler,
                                             AnimationModule* animationModule,
                                             IOModuleConfig* pConfig) :
      mEvs(pEvs),
      mStreamState(STOPPED),
      mVhalHandler(vhalHandler),
      mAnimationModule(animationModule),
      mIOModuleConfig(pConfig) {
    mEvsCameraIds = {"0", "1", "2", "3"};
}

SurroundView3dSession::~SurroundView3dSession() {
    // In case the client did not call stopStream properly, we should stop the
    // stream explicitly. Otherwise the process thread will take forever to
    // join.
    stopStream();

    // Waiting for the process thread to finish the buffered frames.
    mProcessThread.join();

    mEvs->closeCamera(mCamera);
}

// Methods from ::android::hardware::automotive::sv::V1_0::ISurroundViewSession.
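// Typical client flow (illustrative sketch, not an API contract defined in
// this file): obtain an ISurroundView3dSession from the Surround View service,
// call setViews() with at least one View3d, then startStream() with an
// ISurroundViewStream callback. Rendered frames arrive via
// ISurroundViewStream::receiveFrames() and must be returned with
// doneWithFrames() before the next frame can be delivered.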
Return<SvResult> SurroundView3dSession::startStream(
    const sp<ISurroundViewStream>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (!mIsInitialized && !initialize()) {
        LOG(ERROR) << "There is an error while initializing the use case. "
                   << "Exiting";
        return SvResult::INTERNAL_ERROR;
    }

    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startStream call when a stream is "
                   << "already running.";
        return SvResult::INTERNAL_ERROR;
    }

    if (mViews.empty()) {
        LOG(ERROR) << "No views have been set for the current Surround View "
                   << "3d Session. Please call setViews before starting "
                   << "the stream.";
        return SvResult::VIEW_NOT_SET;
    }

    if (stream == nullptr) {
        LOG(ERROR) << "The input stream is invalid";
        return SvResult::INTERNAL_ERROR;
    }
    mStream = stream;

    mSequenceId = 0;
    startEvs();

    if (mVhalHandler != nullptr) {
        if (!mVhalHandler->startPropertiesUpdate()) {
            LOG(WARNING) << "VhalHandler cannot be started properly";
        }
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    // TODO(b/158131080): the STREAM_STARTED event is not implemented in the
    // EVS reference implementation yet. Once implemented, this logic should
    // be moved to the EVS notify callback.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STARTED";
    mStream->notify(SvEvent::STREAM_STARTED);
    mProcessingEvsFrames = false;

    // Start the frame generation thread
    mStreamState = RUNNING;

    mProcessThread = thread([this]() {
        processFrames();
    });

    return SvResult::OK;
}

Return<void> SurroundView3dSession::stopStream() {
    LOG(DEBUG) << __FUNCTION__;
    unique_lock<mutex> lock(mAccessLock);

    if (mVhalHandler != nullptr) {
        mVhalHandler->stopPropertiesUpdate();
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    if (mStreamState == RUNNING) {
        // Tell the processFrames loop to stop processing frames
        mStreamState = STOPPING;

        // Stop the EVS stream asynchronously
        mCamera->stopVideoStream();
    }

    return {};
}

Return<void> SurroundView3dSession::doneWithFrames(
    const SvFramesDesc& svFramesDesc) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mFramesRecord.inUse = false;

    (void)svFramesDesc;
    return {};
}

// Methods from ISurroundView3dSession follow.
Return<SvResult> SurroundView3dSession::setViews(
    const hidl_vec<View3d>& views) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mViews.resize(views.size());
    for (int i = 0; i < views.size(); i++) {
        mViews[i] = views[i];
    }

    return SvResult::OK;
}

Return<SvResult> SurroundView3dSession::set3dConfig(const Sv3dConfig& sv3dConfig) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (sv3dConfig.width <= 0 || sv3dConfig.width > 4096) {
        LOG(WARNING) << "The width of the 3d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    if (sv3dConfig.height <= 0 || sv3dConfig.height > 4096) {
        LOG(WARNING) << "The height of the 3d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    mConfig.width = sv3dConfig.width;
    mConfig.height = sv3dConfig.height;
    mConfig.carDetails = sv3dConfig.carDetails;

    if (mStream != nullptr) {
        LOG(DEBUG) << "Notify SvEvent::CONFIG_UPDATED";
        mStream->notify(SvEvent::CONFIG_UPDATED);
    }

    return SvResult::OK;
}

Return<void> SurroundView3dSession::get3dConfig(get3dConfig_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mConfig);
    return {};
}

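// Sanity-checks the overlays shared memory against overlaysMemoryDesc: for
// each descriptor the buffer is expected to contain a 2-byte overlay id
// followed by verticesCount vertices of 16 bytes each, with unique ids and a
// vertex count that is valid for the declared primitive type.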
bool VerifyOverlayData(const OverlaysData& overlaysData) {
    // Check that the size of the shared memory matches overlaysMemoryDesc.
    const int kVertexSize = 16;
    const int kIdSize = 2;
    int memDescSize = 0;
    for (auto& overlayMemDesc : overlaysData.overlaysMemoryDesc) {
        memDescSize += kIdSize + kVertexSize * overlayMemDesc.verticesCount;
    }
    if (memDescSize != overlaysData.overlaysMemory.size()) {
        LOG(ERROR) << "Shared memory and overlaysMemoryDesc size mismatch.";
        return false;
    }

    // Map memory.
    sp<IMemory> pSharedMemory = mapMemory(overlaysData.overlaysMemory);
    if (pSharedMemory == nullptr) {
        LOG(ERROR) << "mapMemory failed.";
        return false;
    }

    // Get the data pointer.
    uint8_t* pData = static_cast<uint8_t*>(
        static_cast<void*>(pSharedMemory->getPointer()));
    if (pData == nullptr) {
        LOG(ERROR) << "Shared memory getPointer() failed.";
        return false;
    }

    int idOffset = 0;
    set<uint16_t> overlayIdSet;
    for (auto& overlayMemDesc : overlaysData.overlaysMemoryDesc) {
        if (overlayIdSet.find(overlayMemDesc.id) != overlayIdSet.end()) {
            LOG(ERROR) << "Duplicate id within memory descriptor.";
            return false;
        }
        overlayIdSet.insert(overlayMemDesc.id);

        if (overlayMemDesc.verticesCount < 3) {
            LOG(ERROR) << "Less than 3 vertices.";
            return false;
        }

        if (overlayMemDesc.overlayPrimitive == OverlayPrimitive::TRIANGLES &&
            overlayMemDesc.verticesCount % 3 != 0) {
            LOG(ERROR) << "Triangles primitive does not have a vertex count "
                       << "that is a multiple of 3.";
            return false;
        }

        const uint16_t overlayId =
            *reinterpret_cast<const uint16_t*>(pData + idOffset);

        if (overlayId != overlayMemDesc.id) {
            LOG(ERROR) << "Overlay id mismatch " << overlayId << ", "
                       << overlayMemDesc.id;
            return false;
        }

        idOffset += kIdSize + (kVertexSize * overlayMemDesc.verticesCount);
    }

    return true;
}

// TODO(b/150412555): the overlay related methods are incomplete.
Return<SvResult> SurroundView3dSession::updateOverlays(
        const OverlaysData& overlaysData) {
    if (!VerifyOverlayData(overlaysData)) {
        LOG(ERROR) << "VerifyOverlayData failed.";
        return SvResult::INVALID_ARG;
    }

    return SvResult::OK;
}

Return<void> SurroundView3dSession::projectCameraPointsTo3dSurface(
    const hidl_vec<Point2dInt>& cameraPoints, const hidl_string& cameraId,
    projectCameraPointsTo3dSurface_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;
    bool cameraIdFound = false;
    int cameraIndex = 0;
    std::vector<Point3dFloat> points3d;

    // Note: mEvsCameraIds must be in the order front, right, rear, left.
    for (auto& evsCameraId : mEvsCameraIds) {
        if (cameraId == evsCameraId) {
            cameraIdFound = true;
            LOG(DEBUG) << "Camera id found for projection: " << cameraId;
            break;
        }
        cameraIndex++;
    }

    if (!cameraIdFound) {
        LOG(ERROR) << "Camera id not found for projection: " << cameraId;
        _hidl_cb(points3d);
        return {};
    }

    for (const auto& cameraPoint : cameraPoints) {
        Point3dFloat point3d = {false, 0.0, 0.0, 0.0};

        // Verify that the camera point is within the camera resolution bounds.
        point3d.isValid = (cameraPoint.x >= 0 && cameraPoint.x < mConfig.width &&
                           cameraPoint.y >= 0 && cameraPoint.y < mConfig.height);
        if (!point3d.isValid) {
            LOG(WARNING) << "Camera point (" << cameraPoint.x << ", " << cameraPoint.y
                         << ") is out of camera resolution bounds.";
            points3d.push_back(point3d);
            continue;
        }

        // Project points using the mSurroundView core library function.
        const Coordinate2dInteger camCoord(cameraPoint.x, cameraPoint.y);
        Coordinate3dFloat projPoint3d(0.0, 0.0, 0.0);
        point3d.isValid =
            mSurroundView->GetProjectionPointFromRawCameraToSurroundView3d(camCoord,
                                                                           cameraIndex,
                                                                           &projPoint3d);
        // Convert projPoint3d (in meters) to point3d (in millimeters).
        point3d.x = projPoint3d.x * 1000.0;
        point3d.y = projPoint3d.y * 1000.0;
        point3d.z = projPoint3d.z * 1000.0;
        points3d.push_back(point3d);
    }
    _hidl_cb(points3d);
    return {};
}

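// Renders one surround view frame for the given sequence id: drops the frame
// if the client still holds the previous output, re-allocates the output
// buffers on a config change, feeds VHAL-driven animation parameters to the
// core library, renders into mSvTexture, and hands the result to the client
// via receiveFrames().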
bool SurroundView3dSession::handleFrames(int sequenceId) {
    LOG(INFO) << __FUNCTION__ << ": Handling sequenceId " << sequenceId << ".";

    // TODO(b/157498592): Only one set of EVS input frames and one SV output
    // frame is currently supported. Implement a buffer queue for both of them.
    {
        scoped_lock<mutex> lock(mAccessLock);

        if (mFramesRecord.inUse) {
            LOG(DEBUG) << "Notify SvEvent::FRAME_DROPPED";
            mStream->notify(SvEvent::FRAME_DROPPED);
            return true;
        }
    }

    // If the width/height was changed, re-allocate the data pointer.
    if (mOutputWidth != mConfig.width
        || mOutputHeight != mConfig.height) {
        LOG(DEBUG) << "Config changed. Re-allocate memory. "
                   << "Old width: "
                   << mOutputWidth
                   << ", old height: "
                   << mOutputHeight
                   << "; new width: "
                   << mConfig.width
                   << ", new height: "
                   << mConfig.height;
        delete[] static_cast<char*>(mOutputPointer.data_pointer);
        mOutputWidth = mConfig.width;
        mOutputHeight = mConfig.height;
        mOutputPointer.height = mOutputHeight;
        mOutputPointer.width = mOutputWidth;
        mOutputPointer.format = Format::RGBA;
        mOutputPointer.data_pointer =
            new char[mOutputHeight * mOutputWidth * kNumChannels];

        if (!mOutputPointer.data_pointer) {
            LOG(ERROR) << "Memory allocation failed. Exiting.";
            return false;
        }

        Size2dInteger size = Size2dInteger(mOutputWidth, mOutputHeight);
        mSurroundView->Update3dOutputResolution(size);

        mSvTexture = new GraphicBuffer(mOutputWidth,
                                       mOutputHeight,
                                       HAL_PIXEL_FORMAT_RGBA_8888,
                                       1,
                                       GRALLOC_USAGE_HW_TEXTURE,
                                       "SvTexture");
        if (mSvTexture->initCheck() == OK) {
            LOG(INFO) << "Successfully allocated Graphic Buffer";
        } else {
            LOG(ERROR) << "Failed to allocate Graphic Buffer";
            return false;
        }
    }

    // TODO(b/150412555): do not use setViews for frames generation
    // since there is a discrepancy between the HIDL APIs and core lib APIs.
    array<array<float, 4>, 4> matrix;

    // TODO(b/150412555): use hard-coded views for now. Change the view every
    // frame.
    int recViewId = sequenceId % 16;
    for (int i = 0; i < 4; i++) {
        for (int j = 0; j < 4; j++) {
            matrix[i][j] = kRecViews[recViewId][i * 4 + j];
        }
    }

    // Get the latest VHal property values
    if (mVhalHandler != nullptr) {
        if (!mVhalHandler->getPropertyValues(&mPropertyValues)) {
            LOG(ERROR) << "Failed to get property values";
        }
    } else {
        LOG(WARNING) << "VhalHandler is null. Ignored";
    }

    vector<AnimationParam> params;
    if (mAnimationModule != nullptr) {
        params = mAnimationModule->getUpdatedAnimationParams(mPropertyValues);
    } else {
        LOG(WARNING) << "AnimationModule is null. Ignored";
    }

    if (!params.empty()) {
        mSurroundView->SetAnimations(params);
    } else {
        LOG(INFO) << "AnimationParams is empty. Ignored";
    }

    if (mSurroundView->Get3dSurroundView(
            mInputPointers, matrix, &mOutputPointer)) {
        LOG(INFO) << "Get3dSurroundView succeeded";
    } else {
        LOG(ERROR) << "Get3dSurroundView failed. "
                   << "Using memset to initialize to gray.";
        memset(mOutputPointer.data_pointer, kGrayColor,
               mOutputHeight * mOutputWidth * kNumChannels);
    }

    void* textureDataPtr = nullptr;
    mSvTexture->lock(GRALLOC_USAGE_SW_WRITE_OFTEN
                     | GRALLOC_USAGE_SW_READ_NEVER,
                     &textureDataPtr);
    if (!textureDataPtr) {
        LOG(ERROR) << "Failed to gain write access to GraphicBuffer!";
        return false;
    }

    // Note: there is a chance that the stride of the texture is not the
    // same as the width. For example, when the frame is 1920 * 1080, the
    // width is 1920, but the stride may be 2048. So we copy the data line
    // by line, instead of using a single memcpy.
    uint8_t* writePtr = static_cast<uint8_t*>(textureDataPtr);
    uint8_t* readPtr = static_cast<uint8_t*>(mOutputPointer.data_pointer);
    const int readStride = mOutputWidth * kNumChannels;
    const int writeStride = mSvTexture->getStride() * kNumChannels;
    if (readStride == writeStride) {
        memcpy(writePtr, readPtr, readStride * mSvTexture->getHeight());
    } else {
        for (int i = 0; i < mSvTexture->getHeight(); i++) {
            memcpy(writePtr, readPtr, readStride);
            writePtr = writePtr + writeStride;
            readPtr = readPtr + readStride;
        }
    }
    LOG(INFO) << "memcpy finished!";
    mSvTexture->unlock();

    ANativeWindowBuffer* buffer = mSvTexture->getNativeBuffer();
    LOG(DEBUG) << "ANativeWindowBuffer->handle: " << buffer->handle;

    {
        scoped_lock<mutex> lock(mAccessLock);

        mFramesRecord.frames.svBuffers.resize(1);
        SvBuffer& svBuffer = mFramesRecord.frames.svBuffers[0];
        svBuffer.viewId = 0;
        svBuffer.hardwareBuffer.nativeHandle = buffer->handle;
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc *>(
                &svBuffer.hardwareBuffer.description);
        pDesc->width = mOutputWidth;
        pDesc->height = mOutputHeight;
        pDesc->layers = 1;
        pDesc->usage = GRALLOC_USAGE_HW_TEXTURE;
        pDesc->stride = mSvTexture->getStride();
        pDesc->format = HAL_PIXEL_FORMAT_RGBA_8888;
        mFramesRecord.frames.timestampNs = elapsedRealtimeNano();
        mFramesRecord.frames.sequenceId = sequenceId;

        mFramesRecord.inUse = true;
        mStream->receiveFrames(mFramesRecord.frames);
    }

    return true;
}

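// One-time setup performed lazily on the first startStream() call: opens the
// EVS camera group, creates the core-library SurroundView instance with the
// static configuration from the IO module, and allocates the input and output
// CPU buffers plus the output GraphicBuffer.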
bool SurroundView3dSession::initialize() {
    lock_guard<mutex> lock(mAccessLock, adopt_lock);

    if (!setupEvs()) {
        LOG(ERROR) << "Failed to setup EVS components for 3d session";
        return false;
    }

    // TODO(b/150412555): ask the core-lib team to add an API description for
    // the "Create" method in the .h file.
    // The Create method will never return a null pointer based on the API
    // description.
    mSurroundView = unique_ptr<SurroundView>(Create());

    SurroundViewStaticDataParams params =
        SurroundViewStaticDataParams(
            mCameraParams,
            mIOModuleConfig->sv2dConfig.sv2dParams,
            mIOModuleConfig->sv3dConfig.sv3dParams,
            GetUndistortionScales(),
            mIOModuleConfig->sv2dConfig.carBoundingBox,
            mIOModuleConfig->carModelConfig.carModel.texturesMap,
            mIOModuleConfig->carModelConfig.carModel.partsMap);
    mSurroundView->SetStaticData(params);

    mInputPointers.resize(kNumFrames);
    for (int i = 0; i < kNumFrames; i++) {
        mInputPointers[i].width = mCameraParams[i].size.width;
        mInputPointers[i].height = mCameraParams[i].size.height;
        mInputPointers[i].format = Format::RGB;
        mInputPointers[i].cpu_data_pointer =
            (void*)new uint8_t[mInputPointers[i].width *
                               mInputPointers[i].height *
                               kNumChannels];
    }
    LOG(INFO) << "Allocated " << kNumFrames << " input pointers";

    mOutputWidth = mIOModuleConfig->sv3dConfig.sv3dParams.resolution.width;
    mOutputHeight = mIOModuleConfig->sv3dConfig.sv3dParams.resolution.height;

    mConfig.width = mOutputWidth;
    mConfig.height = mOutputHeight;
    mConfig.carDetails = SvQuality::HIGH;

    mOutputPointer.height = mOutputHeight;
    mOutputPointer.width = mOutputWidth;
    mOutputPointer.format = Format::RGBA;
    mOutputPointer.data_pointer = new char[
        mOutputHeight * mOutputWidth * kNumChannels];

    if (!mOutputPointer.data_pointer) {
        LOG(ERROR) << "Memory allocation failed. Exiting.";
        return false;
    }

    mSvTexture = new GraphicBuffer(mOutputWidth,
                                   mOutputHeight,
                                   HAL_PIXEL_FORMAT_RGBA_8888,
                                   1,
                                   GRALLOC_USAGE_HW_TEXTURE,
                                   "SvTexture");

    if (mSvTexture->initCheck() == OK) {
        LOG(INFO) << "Successfully allocated Graphic Buffer";
    } else {
        LOG(ERROR) << "Failed to allocate Graphic Buffer";
        return false;
    }

    mIsInitialized = true;
    return true;
}

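// Opens the EVS camera group named in the IO module config, picks the largest
// RGBA8888 output stream configuration advertised in the camera metadata, and
// fetches the per-physical-camera parameters that the core library needs.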
bool SurroundView3dSession::setupEvs() {
    // Read the camera related information from the config object
    const string evsGroupId = mIOModuleConfig->cameraConfig.evsGroupId;

    // Setup for EVS
    LOG(INFO) << "Requesting camera list";
    mEvs->getCameraList_1_1(
            [this, evsGroupId] (hidl_vec<CameraDesc> cameraList) {
        LOG(INFO) << "Camera list callback received " << cameraList.size();
        for (auto&& cam : cameraList) {
            LOG(INFO) << "Found camera " << cam.v1.cameraId;
            if (cam.v1.cameraId == evsGroupId) {
                mCameraDesc = cam;
            }
        }
    });

    bool foundCfg = false;
    std::unique_ptr<Stream> targetCfg(new Stream());

    // This logic picks the configuration with the largest area that supports
    // the RGBA8888 format.
    int32_t maxArea = 0;
    camera_metadata_entry_t streamCfgs;
    if (!find_camera_metadata_entry(
            reinterpret_cast<camera_metadata_t *>(mCameraDesc.metadata.data()),
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            &streamCfgs)) {
        // Stream configurations are found in metadata
        RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(
            streamCfgs.data.i32);
        for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
            if (ptr->direction ==
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {

                if (ptr->width * ptr->height > maxArea) {
                    targetCfg->id = ptr->id;
                    targetCfg->width = ptr->width;
                    targetCfg->height = ptr->height;

                    // This client always wants the RGBA8888 input data format
                    targetCfg->format =
                        static_cast<GraphicsPixelFormat>(
                            HAL_PIXEL_FORMAT_RGBA_8888);

                    maxArea = ptr->width * ptr->height;

                    foundCfg = true;
                }
            }
            ++ptr;
        }
    } else {
        LOG(WARNING) << "No stream configuration data is found in the "
                     << "camera metadata.";
    }

    if (!foundCfg) {
        LOG(INFO) << "No config was found";
        targetCfg = nullptr;
        return false;
    }

    string camId = mCameraDesc.v1.cameraId.c_str();
    mCamera = mEvs->openCamera_1_1(camId.c_str(), *targetCfg);
    if (mCamera == nullptr) {
        LOG(ERROR) << "Failed to allocate EVS Camera interface for " << camId;
        return false;
    } else {
        LOG(INFO) << "Camera " << camId << " is opened successfully";
    }

    map<string, AndroidCameraParams> cameraIdToAndroidParameters;
    for (const auto& id : mIOModuleConfig->cameraConfig.evsCameraIds) {
        AndroidCameraParams params;
        if (getAndroidCameraParams(mCamera, id, params)) {
            cameraIdToAndroidParameters.emplace(id, params);
            LOG(INFO) << "Camera parameters are fetched successfully for "
                      << "physical camera: " << id;
        } else {
            LOG(ERROR) << "Failed to get camera parameters for "
                       << "physical camera: " << id;
            return false;
        }
    }

    mCameraParams =
        convertToSurroundViewCameraParams(cameraIdToAndroidParameters);

    for (auto& camera : mCameraParams) {
        camera.size.width = targetCfg->width;
        camera.size.height = targetCfg->height;
        camera.circular_fov = 179;
    }

    return true;
}

885 mFramesHandler = new FramesHandler(mCamera, this);
886 Return<EvsResult> result = mCamera->startVideoStream(mFramesHandler);
887 if (result != EvsResult::OK) {
888 LOG(ERROR) << "Failed to start video stream";
889 return false;
890 } else {
891 LOG(INFO) << "Video stream was started successfully";
892 }
893
894 return true;
895}
896
Haoxiang Li35d2a702020-04-10 01:19:32 +0000897} // namespace implementation
898} // namespace V1_0
899} // namespace sv
900} // namespace automotive
901} // namespace hardware
902} // namespace android