/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "SurroundView2dSession.h"

#include <android-base/logging.h>
#include <android/hardware_buffer.h>
#include <system/camera_metadata.h>
#include <utils/SystemClock.h>

#include <thread>

#include <android/hardware/camera/device/3.2/ICameraDevice.h>

#include "CameraUtils.h"

using ::android::hardware::automotive::evs::V1_0::EvsResult;
using ::android::hardware::camera::device::V3_2::Stream;

using GraphicsPixelFormat = ::android::hardware::graphics::common::V1_0::PixelFormat;

namespace android {
namespace hardware {
namespace automotive {
namespace sv {
namespace V1_0 {
namespace implementation {

// TODO(b/158479099): There is a lot of redundant code between the 2d and 3d
// sessions. Decrease the degree of redundancy.
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;

static const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);
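// The ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS metadata entry is treated
// as a flat int32 array and reinterpreted as consecutive RawStreamConfig
// records; kStreamCfgSz is the number of int32 entries in one record. See the
// parsing loop in setupEvs() below.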
static const uint8_t kGrayColor = 128;
static const int kNumChannels = 3;
// Number of physical camera frames expected in each EVS frame delivery.
static const int kNumFrames = 4;
static const int kSv2dViewId = 0;
58
Haoxiang Lib88cd3e2020-06-04 10:27:31 -070059SurroundView2dSession::FramesHandler::FramesHandler(
60 sp<IEvsCamera> pCamera, sp<SurroundView2dSession> pSession)
61 : mCamera(pCamera),
62 mSession(pSession) {}
63
64Return<void> SurroundView2dSession::FramesHandler::deliverFrame(
65 const BufferDesc_1_0& bufDesc_1_0) {
66 LOG(INFO) << "Ignores a frame delivered from v1.0 EVS service.";
67 mCamera->doneWithFrame(bufDesc_1_0);
68
Haoxiang Lia091e4a2020-06-06 20:38:46 -070069 return {};
Haoxiang Lib88cd3e2020-06-04 10:27:31 -070070}

Return<void> SurroundView2dSession::FramesHandler::deliverFrame_1_1(
    const hidl_vec<BufferDesc_1_1>& buffers) {
    LOG(INFO) << "Received " << buffers.size() << " frames from the camera";
    mSession->mSequenceId++;

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        if (mSession->mProcessingEvsFrames) {
            LOG(WARNING) << "EVS frames are being processed. Skip frames: " << mSession->mSequenceId;
            mCamera->doneWithFrame_1_1(buffers);
            return {};
        }
    }

    if (buffers.size() != kNumFrames) {
        LOG(ERROR) << "The number of incoming frames is " << buffers.size()
                   << ", which does not match the number " << kNumFrames
                   << " specified in the config file";
        return {};
    }

    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        for (int i = 0; i < kNumFrames; i++) {
            LOG(DEBUG) << "Copying buffer No." << i
                       << " to Surround View Service";
            mSession->copyFromBufferToPointers(buffers[i],
                                               mSession->mInputPointers[i]);
        }
    }

    mCamera->doneWithFrame_1_1(buffers);

    // Notify the session that a new set of frames is ready.
    {
        scoped_lock<mutex> lock(mSession->mAccessLock);
        mSession->mProcessingEvsFrames = true;
    }
    mSession->mFramesSignal.notify_all();

    return {};
}

Return<void> SurroundView2dSession::FramesHandler::notify(const EvsEventDesc& event) {
    switch(event.aType) {
        case EvsEventType::STREAM_STOPPED:
        {
            LOG(INFO) << "Received a STREAM_STOPPED event from Evs.";

            // TODO(b/158339680): There is currently an issue in the EVS
            // reference implementation that prevents the STREAM_STOPPED event
            // from being delivered properly. When the bug is fixed, we should
            // handle this event properly in case the EVS stream is stopped
            // unexpectedly.
            break;
        }

        case EvsEventType::PARAMETER_CHANGED:
            LOG(INFO) << "Camera parameter " << std::hex << event.payload[0]
                      << " is set to " << event.payload[1];
            break;

        // The events below are ignored in the reference implementation.
        case EvsEventType::STREAM_STARTED:
            [[fallthrough]];
        case EvsEventType::FRAME_DROPPED:
            [[fallthrough]];
        case EvsEventType::TIMEOUT:
            LOG(INFO) << "Event " << std::hex << static_cast<unsigned>(event.aType)
                      << " is received but ignored.";
            break;
        default:
            LOG(ERROR) << "Unknown event id: " << static_cast<unsigned>(event.aType);
            break;
    }

    return {};
}

bool SurroundView2dSession::copyFromBufferToPointers(
    BufferDesc_1_1 buffer, SurroundViewInputBufferPointers pointers) {

    AHardwareBuffer_Desc* pDesc =
        reinterpret_cast<AHardwareBuffer_Desc *>(&buffer.buffer.description);

    // Create a GraphicBuffer from the existing handle.
    sp<GraphicBuffer> inputBuffer = new GraphicBuffer(
        buffer.buffer.nativeHandle, GraphicBuffer::CLONE_HANDLE, pDesc->width,
        pDesc->height, pDesc->format, pDesc->layers,
        GRALLOC_USAGE_HW_TEXTURE, pDesc->stride);

    if (inputBuffer == nullptr) {
        LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap image handle";
        // Bail out since the incoming buffer could not be wrapped.
        return false;
    } else {
        LOG(INFO) << "Managed to allocate GraphicBuffer with"
                  << " width: " << pDesc->width
                  << " height: " << pDesc->height
                  << " format: " << pDesc->format
                  << " stride: " << pDesc->stride;
    }

    // Lock the input GraphicBuffer and map it to a pointer. If we failed to
    // lock, return false.
    void* inputDataPtr;
    inputBuffer->lock(
        GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
        &inputDataPtr);
    if (!inputDataPtr) {
        LOG(ERROR) << "Failed to gain read access to GraphicBuffer";
        inputBuffer->unlock();
        return false;
    } else {
        LOG(INFO) << "Managed to get read access to GraphicBuffer";
    }

    int stride = pDesc->stride;

    // readPtr comes from EVS and has 4 channels (RGBA); writePtr points to the
    // 3-channel (RGB) buffer consumed by the core lib. Drop the alpha channel
    // while copying.
    uint8_t* readPtr = static_cast<uint8_t*>(inputDataPtr);
    uint8_t* writePtr = static_cast<uint8_t*>(pointers.cpu_data_pointer);

    for (int i = 0; i < pDesc->width; i++) {
        for (int j = 0; j < pDesc->height; j++) {
            writePtr[(i + j * stride) * 3 + 0] =
                readPtr[(i + j * stride) * 4 + 0];
            writePtr[(i + j * stride) * 3 + 1] =
                readPtr[(i + j * stride) * 4 + 1];
            writePtr[(i + j * stride) * 3 + 2] =
                readPtr[(i + j * stride) * 4 + 2];
        }
    }
    LOG(INFO) << "Brute force copying finished";

    return true;
}

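// processFrames() runs on its own thread (started in startStream()) and is the
// consumer side of a simple handshake with FramesHandler::deliverFrame_1_1():
// the EVS callback copies the incoming buffers, sets mProcessingEvsFrames, and
// signals mFramesSignal; this loop then wakes up, stitches the frames via
// handleFrames(), and clears the flag so the next set of frames can be accepted.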
void SurroundView2dSession::processFrames() {
    while (true) {
        {
            unique_lock<mutex> lock(mAccessLock);

            if (mStreamState != RUNNING) {
                break;
            }

            mFramesSignal.wait(lock, [this]() { return mProcessingEvsFrames; });
        }

        handleFrames(mSequenceId);

        {
            // Set the boolean to false to receive the next set of frames.
            scoped_lock<mutex> lock(mAccessLock);
            mProcessingEvsFrames = false;
        }
    }

    // Notify the SV client that no new results will be delivered.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STOPPED";
    mStream->notify(SvEvent::STREAM_STOPPED);

    {
        scoped_lock<mutex> lock(mAccessLock);
        mStreamState = STOPPED;
        mStream = nullptr;
        LOG(DEBUG) << "Stream marked STOPPED.";
    }
}

SurroundView2dSession::SurroundView2dSession(sp<IEvsEnumerator> pEvs,
                                             IOModuleConfig* pConfig)
    : mEvs(pEvs),
      mIOModuleConfig(pConfig),
      mStreamState(STOPPED) {
    mEvsCameraIds = {"0", "1", "2", "3"};
}

SurroundView2dSession::~SurroundView2dSession() {
    // In case the client did not call stopStream properly, we should stop the
    // stream explicitly. Otherwise the process thread will take forever to
    // join.
    stopStream();

    // Wait for the process thread to finish the buffered frames.
    if (mProcessThread.joinable()) {
        mProcessThread.join();
    }

    mEvs->closeCamera(mCamera);
}

// Methods from ::android::hardware::automotive::sv::V1_0::ISurroundViewSession
Return<SvResult> SurroundView2dSession::startStream(
    const sp<ISurroundViewStream>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (!mIsInitialized && !initialize()) {
        LOG(ERROR) << "There is an error while initializing the use case. "
                   << "Exiting";
        return SvResult::INTERNAL_ERROR;
    }

    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startStream call "
                   << "when a stream is already running.";
        return SvResult::INTERNAL_ERROR;
    }

    if (stream == nullptr) {
        LOG(ERROR) << "The input stream is invalid";
        return SvResult::INTERNAL_ERROR;
    }
    mStream = stream;

    mSequenceId = 0;
    startEvs();

    // TODO(b/158131080): the STREAM_STARTED event is not implemented in the
    // EVS reference implementation yet. Once implemented, this logic should
    // be moved to the EVS notify callback.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STARTED";
    mStream->notify(SvEvent::STREAM_STARTED);
    mProcessingEvsFrames = false;

    // Start the frame generation thread.
    mStreamState = RUNNING;

    mProcessThread = thread([this]() {
        processFrames();
    });

    return SvResult::OK;
}

Return<void> SurroundView2dSession::stopStream() {
    LOG(DEBUG) << __FUNCTION__;
    unique_lock<mutex> lock(mAccessLock);

    if (mStreamState == RUNNING) {
        // Tell the processFrames loop to stop processing frames.
        mStreamState = STOPPING;

        // Stop the EVS stream asynchronously.
        mCamera->stopVideoStream();
        mFramesHandler = nullptr;
    }

    return {};
}

Return<void> SurroundView2dSession::doneWithFrames(
    const SvFramesDesc& svFramesDesc) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    mFramesRecord.inUse = false;

    (void)svFramesDesc;
    return {};
}

// Methods from ISurroundView2dSession follow.
Return<void> SurroundView2dSession::get2dMappingInfo(
    get2dMappingInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mInfo);
    return {};
}

Return<SvResult> SurroundView2dSession::set2dConfig(
    const Sv2dConfig& sv2dConfig) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (sv2dConfig.width <= 0 || sv2dConfig.width > 4096) {
        LOG(WARNING) << "The width of the 2d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    mConfig.width = sv2dConfig.width;
    mConfig.blending = sv2dConfig.blending;
    mHeight = mConfig.width * mInfo.height / mInfo.width;

    if (mStream != nullptr) {
        LOG(DEBUG) << "Notify SvEvent::CONFIG_UPDATED";
        mStream->notify(SvEvent::CONFIG_UPDATED);
    }

    return SvResult::OK;
}

Return<void> SurroundView2dSession::get2dConfig(get2dConfig_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mConfig);
    return {};
}

Return<void> SurroundView2dSession::projectCameraPoints(const hidl_vec<Point2dInt>& points2dCamera,
                                                        const hidl_string& cameraId,
                                                        projectCameraPoints_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;
    std::vector<Point2dFloat> outPoints;
    bool cameraIdFound = false;
    int cameraIndex = 0;
    // Note: mEvsCameraIds must be in the order: front, right, rear, left.
    for (auto& evsCameraId : mEvsCameraIds) {
        if (cameraId == evsCameraId) {
            cameraIdFound = true;
            LOG(DEBUG) << "Camera id found for projection: " << cameraId;
            break;
        }
        cameraIndex++;
    }

    if (!cameraIdFound) {
        LOG(ERROR) << "Camera id not found for projection: " << cameraId;
        _hidl_cb(outPoints);
        return {};
    }

    int width = mConfig.width;
    int height = mHeight;
    for (const auto& cameraPoint : points2dCamera) {
        Point2dFloat outPoint = {false, 0.0, 0.0};
        // Check if the camera point is within the camera resolution bounds.
        if (cameraPoint.x < 0 || cameraPoint.x > width - 1 || cameraPoint.y < 0 ||
            cameraPoint.y > height - 1) {
            LOG(WARNING) << "Camera point (" << cameraPoint.x << ", " << cameraPoint.y
                         << ") is out of camera resolution bounds.";
            outPoint.isValid = false;
            outPoints.push_back(outPoint);
            continue;
        }

        // Project the point using the mSurroundView core lib.
        const Coordinate2dInteger camPoint(cameraPoint.x, cameraPoint.y);
        Coordinate2dFloat projPoint2d(0.0, 0.0);

        outPoint.isValid =
            mSurroundView->GetProjectionPointFromRawCameraToSurroundView2d(camPoint,
                                                                           cameraIndex,
                                                                           &projPoint2d);
        outPoint.x = projPoint2d.x;
        outPoint.y = projPoint2d.y;
        outPoints.push_back(outPoint);
    }

    _hidl_cb(outPoints);
    return {};
}
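
// Client-side usage sketch (illustrative only; the session proxy setup is
// assumed, and the camera id "0" simply matches the first entry in
// mEvsCameraIds): project the pixel (100, 200) seen by the front camera into
// 2d surround view space and check the validity flag before using the result.
//
//     hidl_vec<Point2dInt> cameraPoints;
//     cameraPoints.resize(1);
//     cameraPoints[0].x = 100;
//     cameraPoints[0].y = 200;
//     session->projectCameraPoints(cameraPoints, "0",
//             [](const hidl_vec<Point2dFloat>& points2d) {
//                 if (points2d[0].isValid) {
//                     LOG(INFO) << "Projected point: (" << points2d[0].x
//                               << ", " << points2d[0].y << ")";
//                 }
//             });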

bool SurroundView2dSession::handleFrames(int sequenceId) {
    LOG(INFO) << __FUNCTION__ << ": Handling sequenceId " << sequenceId << ".";

    // TODO(b/157498592): Currently only one set of EVS input frames and one
    // SV output frame are supported. Implement a buffer queue for both of them.
    {
        scoped_lock<mutex> lock(mAccessLock);

        if (mFramesRecord.inUse) {
            LOG(DEBUG) << "Notify SvEvent::FRAME_DROPPED";
            mStream->notify(SvEvent::FRAME_DROPPED);
            return true;
        }
    }

    if (mOutputWidth != mConfig.width || mOutputHeight != mHeight) {
        LOG(DEBUG) << "Config changed. Re-allocate memory."
                   << " Old width: "
                   << mOutputWidth
                   << " Old height: "
                   << mOutputHeight
                   << " New width: "
                   << mConfig.width
                   << " New height: "
                   << mHeight;
        delete[] static_cast<char*>(mOutputPointer.data_pointer);
        mOutputWidth = mConfig.width;
        mOutputHeight = mHeight;
        mOutputPointer.height = mOutputHeight;
        mOutputPointer.width = mOutputWidth;
        mOutputPointer.format = Format::RGB;
        mOutputPointer.data_pointer =
            new char[mOutputHeight * mOutputWidth * kNumChannels];

        if (!mOutputPointer.data_pointer) {
            LOG(ERROR) << "Memory allocation failed. Exiting.";
            return false;
        }

        Size2dInteger size = Size2dInteger(mOutputWidth, mOutputHeight);
        mSurroundView->Update2dOutputResolution(size);

        mSvTexture = new GraphicBuffer(mOutputWidth,
                                       mOutputHeight,
                                       HAL_PIXEL_FORMAT_RGB_888,
                                       1,
                                       GRALLOC_USAGE_HW_TEXTURE,
                                       "SvTexture");
        if (mSvTexture->initCheck() == OK) {
            LOG(INFO) << "Successfully allocated Graphic Buffer";
        } else {
            LOG(ERROR) << "Failed to allocate Graphic Buffer";
            return false;
        }
    }

    if (mSurroundView->Get2dSurroundView(mInputPointers, &mOutputPointer)) {
        LOG(INFO) << "Get2dSurroundView succeeded";
    } else {
        LOG(ERROR) << "Get2dSurroundView failed. "
                   << "Using memset to initialize to gray";
        memset(mOutputPointer.data_pointer, kGrayColor,
               mOutputHeight * mOutputWidth * kNumChannels);
    }

    void* textureDataPtr = nullptr;
    mSvTexture->lock(GRALLOC_USAGE_SW_WRITE_OFTEN
                     | GRALLOC_USAGE_SW_READ_NEVER,
                     &textureDataPtr);
    if (!textureDataPtr) {
        LOG(ERROR) << "Failed to gain write access to GraphicBuffer!";
        return false;
    }

    // Note: the stride of the texture may differ from its width. For example,
    // a buffer with a width of 1080 pixels may be allocated with a stride of
    // 2048 pixels. In that case the data must be copied line by line instead
    // of with a single memcpy.
    uint8_t* writePtr = static_cast<uint8_t*>(textureDataPtr);
    uint8_t* readPtr = static_cast<uint8_t*>(mOutputPointer.data_pointer);
    const int readStride = mOutputWidth * kNumChannels;
    const int writeStride = mSvTexture->getStride() * kNumChannels;
    if (readStride == writeStride) {
        memcpy(writePtr, readPtr, readStride * mSvTexture->getHeight());
    } else {
        for (int i = 0; i < mSvTexture->getHeight(); i++) {
            memcpy(writePtr, readPtr, readStride);
            writePtr = writePtr + writeStride;
            readPtr = readPtr + readStride;
        }
    }
    LOG(DEBUG) << "memcpy finished";
    mSvTexture->unlock();

    ANativeWindowBuffer* buffer = mSvTexture->getNativeBuffer();
    LOG(DEBUG) << "ANativeWindowBuffer->handle: "
               << buffer->handle;

    {
        scoped_lock<mutex> lock(mAccessLock);

        // Fill out the frame descriptor delivered to the client and mark it as
        // in use until the client calls doneWithFrames().
        mFramesRecord.frames.svBuffers.resize(1);
        SvBuffer& svBuffer = mFramesRecord.frames.svBuffers[0];
        svBuffer.viewId = kSv2dViewId;
        svBuffer.hardwareBuffer.nativeHandle = buffer->handle;
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc*>(
                &svBuffer.hardwareBuffer.description);
        pDesc->width = mOutputWidth;
        pDesc->height = mOutputHeight;
        pDesc->layers = 1;
        pDesc->usage = GRALLOC_USAGE_HW_TEXTURE;
        pDesc->stride = mSvTexture->getStride();
        pDesc->format = HAL_PIXEL_FORMAT_RGB_888;
        mFramesRecord.frames.timestampNs = elapsedRealtimeNano();
        mFramesRecord.frames.sequenceId = sequenceId;

        mFramesRecord.inUse = true;
        mStream->receiveFrames(mFramesRecord.frames);
    }

    return true;
}

bool SurroundView2dSession::initialize() {
    // The caller (startStream) already holds mAccessLock, so adopt it here
    // instead of locking again.
    lock_guard<mutex> lock(mAccessLock, adopt_lock);

    if (!setupEvs()) {
        LOG(ERROR) << "Failed to setup EVS components for 2d session";
        return false;
    }

    // TODO(b/150412555): ask core-lib team to add an API description for the
    // "create" method in the .h file.
    // The create method will never return a null pointer based on the API
    // description.
    mSurroundView = unique_ptr<SurroundView>(Create());

    SurroundViewStaticDataParams params =
        SurroundViewStaticDataParams(
            mCameraParams,
            mIOModuleConfig->sv2dConfig.sv2dParams,
            mIOModuleConfig->sv3dConfig.sv3dParams,
            GetUndistortionScales(),
            mIOModuleConfig->sv2dConfig.carBoundingBox,
            mIOModuleConfig->carModelConfig.carModel.texturesMap,
            mIOModuleConfig->carModelConfig.carModel.partsMap);
    mSurroundView->SetStaticData(params);
    if (mSurroundView->Start2dPipeline()) {
        LOG(INFO) << "Start2dPipeline succeeded";
    } else {
        LOG(ERROR) << "Start2dPipeline failed";
        return false;
    }

    mInputPointers.resize(kNumFrames);
    for (int i = 0; i < kNumFrames; i++) {
        mInputPointers[i].width = mCameraParams[i].size.width;
        mInputPointers[i].height = mCameraParams[i].size.height;
        mInputPointers[i].format = Format::RGB;
        mInputPointers[i].cpu_data_pointer =
            (void*)new uint8_t[mInputPointers[i].width *
                               mInputPointers[i].height *
                               kNumChannels];
    }
    LOG(INFO) << "Allocated " << kNumFrames << " input pointers";

    mOutputWidth = mIOModuleConfig->sv2dConfig.sv2dParams.resolution.width;
    mOutputHeight = mIOModuleConfig->sv2dConfig.sv2dParams.resolution.height;

    mConfig.width = mOutputWidth;
    mConfig.blending = SvQuality::HIGH;
    mHeight = mOutputHeight;

    mOutputPointer.height = mOutputHeight;
    mOutputPointer.width = mOutputWidth;
    mOutputPointer.format = mInputPointers[0].format;
    mOutputPointer.data_pointer = new char[
        mOutputHeight * mOutputWidth * kNumChannels];

    if (!mOutputPointer.data_pointer) {
        LOG(ERROR) << "Memory allocation failed. Exiting.";
        return false;
    }

    mSvTexture = new GraphicBuffer(mOutputWidth,
                                   mOutputHeight,
                                   HAL_PIXEL_FORMAT_RGB_888,
                                   1,
                                   GRALLOC_USAGE_HW_TEXTURE,
                                   "SvTexture");

    // Note: sv2dParams is in meters while mInfo must be in millimeters.
    mInfo.width = mIOModuleConfig->sv2dConfig.sv2dParams.physical_size.width * 1000.0;
    mInfo.height = mIOModuleConfig->sv2dConfig.sv2dParams.physical_size.height * 1000.0;
    mInfo.center.isValid = true;
    mInfo.center.x = mIOModuleConfig->sv2dConfig.sv2dParams.physical_center.x * 1000.0;
    mInfo.center.y = mIOModuleConfig->sv2dConfig.sv2dParams.physical_center.y * 1000.0;
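    // For example (hypothetical numbers): a physical_size of 9.0 m x 9.0 m and
    // a physical_center of (0.0 m, 1.5 m) yield mInfo.width = mInfo.height =
    // 9000 mm and mInfo.center = (0 mm, 1500 mm).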

    if (mSvTexture->initCheck() == OK) {
        LOG(INFO) << "Successfully allocated Graphic Buffer";
    } else {
        LOG(ERROR) << "Failed to allocate Graphic Buffer";
        return false;
    }

    mIsInitialized = true;
    return true;
}

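// setupEvs() queries the EVS enumerator for the camera group named by
// cameraConfig.evsGroupId, picks the largest RGBA8888 output stream
// configuration advertised in the camera metadata, opens the logical camera
// with that configuration, and then fetches the per-physical-camera
// parameters for every id in cameraConfig.evsCameraIds.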
bool SurroundView2dSession::setupEvs() {
    // Read the camera related information from the config object.
    const string evsGroupId = mIOModuleConfig->cameraConfig.evsGroupId;

    // Setup for EVS
    LOG(INFO) << "Requesting camera list";
    mEvs->getCameraList_1_1(
        [this, evsGroupId] (hidl_vec<CameraDesc> cameraList) {
            LOG(INFO) << "Camera list callback received " << cameraList.size();
            for (auto&& cam : cameraList) {
                LOG(INFO) << "Found camera " << cam.v1.cameraId;
                if (cam.v1.cameraId == evsGroupId) {
                    mCameraDesc = cam;
                }
            }
        });

    bool foundCfg = false;
    std::unique_ptr<Stream> targetCfg(new Stream());

    // This logic picks the stream configuration with the largest area that
    // supports the RGBA8888 format.
    int32_t maxArea = 0;
    camera_metadata_entry_t streamCfgs;
    if (!find_camera_metadata_entry(
            reinterpret_cast<camera_metadata_t *>(mCameraDesc.metadata.data()),
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            &streamCfgs)) {
        // Stream configurations were found in the metadata. Walk through them
        // one RawStreamConfig record (kStreamCfgSz int32 entries) at a time.
        RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(
            streamCfgs.data.i32);
        for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
            if (ptr->direction ==
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {

                if (ptr->width * ptr->height > maxArea) {
                    targetCfg->id = ptr->id;
                    targetCfg->width = ptr->width;
                    targetCfg->height = ptr->height;

                    // This client always wants the RGBA8888 input data format.
                    targetCfg->format =
                        static_cast<GraphicsPixelFormat>(
                            HAL_PIXEL_FORMAT_RGBA_8888);

                    maxArea = ptr->width * ptr->height;

                    foundCfg = true;
                }
            }
            ++ptr;
        }
    } else {
        LOG(WARNING) << "No stream configuration data is found; "
                     << "default parameters will be used.";
    }

    if (!foundCfg) {
        LOG(INFO) << "No config was found";
        targetCfg = nullptr;
        return false;
    }

    string camId = mCameraDesc.v1.cameraId.c_str();
    mCamera = mEvs->openCamera_1_1(camId.c_str(), *targetCfg);
    if (mCamera == nullptr) {
        LOG(ERROR) << "Failed to allocate EVS Camera interface for " << camId;
        return false;
    } else {
        LOG(INFO) << "Camera " << camId << " is opened successfully";
    }

    map<string, AndroidCameraParams> cameraIdToAndroidParameters;
    for (const auto& id : mIOModuleConfig->cameraConfig.evsCameraIds) {
        AndroidCameraParams params;
        if (getAndroidCameraParams(mCamera, id, params)) {
            cameraIdToAndroidParameters.emplace(id, params);
            LOG(INFO) << "Camera parameters are fetched successfully for "
                      << "physical camera: " << id;
        } else {
            LOG(ERROR) << "Failed to get camera parameters for "
                       << "physical camera: " << id;
            return false;
        }
    }

    mCameraParams =
        convertToSurroundViewCameraParams(cameraIdToAndroidParameters);

    for (auto& camera : mCameraParams) {
        camera.size.width = targetCfg->width;
        camera.size.height = targetCfg->height;
        camera.circular_fov = 179;
    }

    return true;
}

bool SurroundView2dSession::startEvs() {
    mFramesHandler = new FramesHandler(mCamera, this);
    Return<EvsResult> result = mCamera->startVideoStream(mFramesHandler);
    if (result != EvsResult::OK) {
        LOG(ERROR) << "Failed to start video stream";
        return false;
    } else {
        LOG(INFO) << "Video stream was started successfully";
    }

    return true;
}

}  // namespace implementation
}  // namespace V1_0
}  // namespace sv
}  // namespace automotive
}  // namespace hardware
}  // namespace android