/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "SurroundViewService"

#include <android-base/logging.h>
#include <android/hardware_buffer.h>
#include <utils/SystemClock.h>

#include "SurroundView2dSession.h"
#include "CoreLibSetupHelper.h"

using namespace android_auto::surround_view;

namespace android {
namespace hardware {
namespace automotive {
namespace sv {
namespace V1_0 {
namespace implementation {

static const char kGrayColor = 128;
static const int kNumChannels = 3;
static const int kFrameDelayInMilliseconds = 30;

SurroundView2dSession::SurroundView2dSession() :
    mStreamState(STOPPED) {
    mEvsCameraIds = {"0", "1", "2", "3"};
}

// Methods from ::android::hardware::automotive::sv::V1_0::ISurroundViewSession
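// Initializes the session on first use, registers the client's stream
// callback, and starts the frame generation thread.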
Return<SvResult> SurroundView2dSession::startStream(
    const sp<ISurroundViewStream>& stream) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (!mIsInitialized && !initialize()) {
        LOG(ERROR) << "There was an error while initializing the use case. "
                   << "Exiting";
        return SvResult::INTERNAL_ERROR;
    }

    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startStream call "
                   << "when a stream is already running.";
        return SvResult::INTERNAL_ERROR;
    }

    if (stream == nullptr) {
        LOG(ERROR) << "The input stream is invalid";
        return SvResult::INTERNAL_ERROR;
    }
    mStream = stream;

    LOG(DEBUG) << "Notify SvEvent::STREAM_STARTED";
    mStream->notify(SvEvent::STREAM_STARTED);

    // Start the frame generation thread
    mStreamState = RUNNING;
    mCaptureThread = thread([this]() {
        generateFrames();
    });

    return SvResult::OK;
}

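// Signals the frame generation thread to stop and waits for it to exit
// before clearing the registered stream.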
Return<void> SurroundView2dSession::stopStream() {
    LOG(DEBUG) << __FUNCTION__;
    unique_lock<mutex> lock(mAccessLock);

    if (mStreamState == RUNNING) {
        // Tell the generateFrames loop that we want it to stop
        mStreamState = STOPPING;

        // Block outside the mutex until the "stop" flag has been acknowledged.
        // We won't send any more frames, but the client might still get some
        // that are already in flight.
        LOG(DEBUG) << __FUNCTION__ << ": Waiting for stream thread to end...";
        lock.unlock();
        mCaptureThread.join();
        lock.lock();

        mStreamState = STOPPED;
        mStream = nullptr;
        LOG(DEBUG) << "Stream marked STOPPED.";
    }

    return {};
}

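// Marks the delivered frames as consumed so that generateFrames() may send
// the next frame to the client.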
Return<void> SurroundView2dSession::doneWithFrames(
    const SvFramesDesc& svFramesDesc) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    framesRecord.inUse = false;

    (void)svFramesDesc;
    return {};
}

// Methods from ISurroundView2dSession follow.
Return<void> SurroundView2dSession::get2dMappingInfo(
    get2dMappingInfo_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mInfo);
    return {};
}

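// Validates the requested output width, derives the output height from the
// aspect ratio of the 2d mapping info, and notifies the client that the
// configuration has been updated.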
Return<SvResult> SurroundView2dSession::set2dConfig(
    const Sv2dConfig& sv2dConfig) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    if (sv2dConfig.width <= 0 || sv2dConfig.width > 4096) {
        LOG(WARNING) << "The width of the 2d config is out of the range (0, 4096]. "
                     << "Ignored!";
        return SvResult::INVALID_ARG;
    }

    mConfig.width = sv2dConfig.width;
    mConfig.blending = sv2dConfig.blending;
    mHeight = mConfig.width * mInfo.height / mInfo.width;

    if (mStream != nullptr) {
        LOG(DEBUG) << "Notify SvEvent::CONFIG_UPDATED";
        mStream->notify(SvEvent::CONFIG_UPDATED);
    }

    return SvResult::OK;
}

Return<void> SurroundView2dSession::get2dConfig(get2dConfig_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;

    _hidl_cb(mConfig);
    return {};
}

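// Verifies that the given camera id belongs to this session, then checks
// each input point against the configured 2d output bounds. Out-of-range
// points are marked invalid; in-range points are returned with placeholder
// coordinates.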
Return<void> SurroundView2dSession::projectCameraPoints(
    const hidl_vec<Point2dInt>& points2dCamera,
    const hidl_string& cameraId,
    projectCameraPoints_cb _hidl_cb) {
    LOG(DEBUG) << __FUNCTION__;
    scoped_lock<mutex> lock(mAccessLock);

    bool cameraIdFound = false;
    for (auto& evsCameraId : mEvsCameraIds) {
        if (cameraId == evsCameraId) {
            cameraIdFound = true;
            LOG(INFO) << "Camera id found.";
            break;
        }
    }

    if (!cameraIdFound) {
        LOG(ERROR) << "Camera id not found.";
        _hidl_cb({});
        return {};
    }

    hidl_vec<Point2dFloat> outPoints;
    outPoints.resize(points2dCamera.size());

    int width = mConfig.width;
    int height = mHeight;
    for (size_t i = 0; i < points2dCamera.size(); i++) {
        // Assume that any point inside the frame can be projected into the 2d
        // surround view space; points outside the frame cannot.
        if (points2dCamera[i].x < 0 || points2dCamera[i].x > width - 1 ||
            points2dCamera[i].y < 0 || points2dCamera[i].y > height - 1) {
            LOG(WARNING) << __FUNCTION__
                         << ": got an invalid 2d camera point. Ignored";
            outPoints[i].isValid = false;
            outPoints[i].x = 10000;
            outPoints[i].y = 10000;
        } else {
            outPoints[i].isValid = true;
            outPoints[i].x = 0;
            outPoints[i].y = 0;
        }
    }

    _hidl_cb(outPoints);
    return {};
}

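// Frame generation loop running on mCaptureThread. Each iteration
// re-allocates the output buffers if the configuration changed, renders the
// 2d surround view, copies the result into the GraphicBuffer, and delivers
// it to the client, until the stream state leaves RUNNING.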
void SurroundView2dSession::generateFrames() {
    int sequenceId = 0;

    while (true) {
        {
            scoped_lock<mutex> lock(mAccessLock);

            if (mStreamState != RUNNING) {
                // Break out of our main thread loop
                LOG(INFO) << "Stream state is not RUNNING. "
                          << "Exiting the loop";
                break;
            }

            if (mOutputWidth != mConfig.width || mOutputHeight != mHeight) {
                LOG(DEBUG) << "Config changed. Re-allocate memory."
                           << " Old width: "
                           << mOutputWidth
                           << " Old height: "
                           << mOutputHeight
                           << " New width: "
                           << mConfig.width
                           << " New height: "
                           << mHeight;
                delete[] static_cast<char*>(mOutputPointer.data_pointer);
                mOutputWidth = mConfig.width;
                mOutputHeight = mHeight;
                mOutputPointer.height = mOutputHeight;
                mOutputPointer.width = mOutputWidth;
                mOutputPointer.format = Format::RGB;
                mOutputPointer.data_pointer =
                    new char[mOutputHeight * mOutputWidth * kNumChannels];

                if (!mOutputPointer.data_pointer) {
                    LOG(ERROR) << "Memory allocation failed. Exiting.";
                    break;
                }

                Size2dInteger size = Size2dInteger(mOutputWidth, mOutputHeight);
                mSurroundView->Update2dOutputResolution(size);

                mSvTexture = new GraphicBuffer(mOutputWidth,
                                               mOutputHeight,
                                               HAL_PIXEL_FORMAT_RGB_888,
                                               1,
                                               GRALLOC_USAGE_HW_TEXTURE,
                                               "SvTexture");
                if (mSvTexture->initCheck() == OK) {
                    LOG(INFO) << "Successfully allocated Graphic Buffer";
                } else {
                    LOG(ERROR) << "Failed to allocate Graphic Buffer";
                    break;
                }
            }
        }

        if (mSurroundView->Get2dSurroundView(mInputPointers, &mOutputPointer)) {
            LOG(INFO) << "Get2dSurroundView succeeded";
        } else {
            LOG(ERROR) << "Get2dSurroundView failed. "
                       << "Using memset to initialize to gray";
            memset(mOutputPointer.data_pointer, kGrayColor,
                   mOutputHeight * mOutputWidth * kNumChannels);
        }

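        // Lock the GraphicBuffer for CPU writing and copy the rendered
        // output into it.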
        void* textureDataPtr = nullptr;
        mSvTexture->lock(GRALLOC_USAGE_SW_WRITE_OFTEN
                         | GRALLOC_USAGE_SW_READ_NEVER,
                         &textureDataPtr);
        if (!textureDataPtr) {
            LOG(ERROR) << "Failed to gain write access to GraphicBuffer!";
            break;
        }

        // Note: the stride of the texture may differ from its width. For
        // example, a buffer that is 1080 pixels wide may be allocated with a
        // stride of 2048. So copy the data line by line instead of with a
        // single memcpy.
        uint8_t* writePtr = static_cast<uint8_t*>(textureDataPtr);
        uint8_t* readPtr = static_cast<uint8_t*>(mOutputPointer.data_pointer);
        const int readStride = mOutputWidth * kNumChannels;
        const int writeStride = mSvTexture->getStride() * kNumChannels;
        if (readStride == writeStride) {
            memcpy(writePtr, readPtr, readStride * mSvTexture->getHeight());
        } else {
            for (uint32_t i = 0; i < mSvTexture->getHeight(); i++) {
                memcpy(writePtr, readPtr, readStride);
                writePtr = writePtr + writeStride;
                readPtr = readPtr + readStride;
            }
        }
        LOG(INFO) << "memcpy finished";
        mSvTexture->unlock();

        ANativeWindowBuffer* buffer = mSvTexture->getNativeBuffer();
        LOG(DEBUG) << "ANativeWindowBuffer->handle: "
                   << buffer->handle;

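        // Wrap the GraphicBuffer in an SvBuffer and fill in the hardware
        // buffer description so the client can import the frame.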
        framesRecord.frames.svBuffers.resize(1);
        SvBuffer& svBuffer = framesRecord.frames.svBuffers[0];
        svBuffer.viewId = 0;
        svBuffer.hardwareBuffer.nativeHandle = buffer->handle;
        AHardwareBuffer_Desc* pDesc =
            reinterpret_cast<AHardwareBuffer_Desc*>(
                &svBuffer.hardwareBuffer.description);
        pDesc->width = mOutputWidth;
        pDesc->height = mOutputHeight;
        pDesc->layers = 1;
        pDesc->usage = GRALLOC_USAGE_HW_TEXTURE;
        pDesc->stride = mSvTexture->getStride();
        pDesc->format = HAL_PIXEL_FORMAT_RGB_888;
        framesRecord.frames.timestampNs = elapsedRealtimeNano();
        framesRecord.frames.sequenceId = sequenceId++;

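        // Deliver the frame to the client, or drop it if the previous frame
        // has not been returned via doneWithFrames() yet.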
        {
            scoped_lock<mutex> lock(mAccessLock);

            if (framesRecord.inUse) {
                LOG(DEBUG) << "Notify SvEvent::FRAME_DROPPED";
                mStream->notify(SvEvent::FRAME_DROPPED);
            } else {
                framesRecord.inUse = true;
                mStream->receiveFrames(framesRecord.frames);
            }
        }

        // TODO(b/150412555): this delay is added explicitly and should be
        // removed once the EVS camera is used.
        this_thread::sleep_for(chrono::milliseconds(
            kFrameDelayInMilliseconds));
    }

    // If we've been asked to stop, send an event to signal the actual
    // end of stream.
    LOG(DEBUG) << "Notify SvEvent::STREAM_STOPPED";
    mStream->notify(SvEvent::STREAM_STOPPED);
}

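// One-time setup: creates the core-lib SurroundView instance, feeds it the
// static calibration data and the test input images, allocates the output
// buffers, and starts the 2d pipeline.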
bool SurroundView2dSession::initialize() {
    // This method is called from startStream() with mAccessLock already held,
    // so the lock is not acquired again here.

    // TODO(b/150412555): ask the core-lib team to add an API description for
    // the "Create" method in the .h file.
    // Based on the API description, the Create method never returns a null
    // pointer.
    mSurroundView = unique_ptr<SurroundView>(Create());

    mSurroundView->SetStaticData(GetCameras(), Get2dParams(), Get3dParams(),
                                 GetUndistortionScales(), GetBoundingBox());

    // TODO(b/150412555): remove after the EVS camera is used
    mInputPointers = mSurroundView->ReadImages(
        "/etc/automotive/sv/cam0.png",
        "/etc/automotive/sv/cam1.png",
        "/etc/automotive/sv/cam2.png",
        "/etc/automotive/sv/cam3.png");
    if (mInputPointers.size() == 4
            && mInputPointers[0].cpu_data_pointer != nullptr) {
        LOG(INFO) << "ReadImages succeeded";
    } else {
        LOG(ERROR) << "Failed to read images";
        return false;
    }

    mOutputWidth = Get2dParams().resolution.width;
    mOutputHeight = Get2dParams().resolution.height;

    mConfig.width = mOutputWidth;
    mConfig.blending = SvQuality::HIGH;
    mHeight = mOutputHeight;

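    // Allocate the CPU-side buffer that receives the rendered 2d surround
    // view from the core lib.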
    mOutputPointer.height = mOutputHeight;
    mOutputPointer.width = mOutputWidth;
    mOutputPointer.format = mInputPointers[0].format;
    mOutputPointer.data_pointer = new char[
        mOutputHeight * mOutputWidth * kNumChannels];

    if (!mOutputPointer.data_pointer) {
        LOG(ERROR) << "Memory allocation failed. Exiting.";
        return false;
    }

    mSvTexture = new GraphicBuffer(mOutputWidth,
                                   mOutputHeight,
                                   HAL_PIXEL_FORMAT_RGB_888,
                                   1,
                                   GRALLOC_USAGE_HW_TEXTURE,
                                   "SvTexture");

    // TODO(b/150412555): the 2d mapping info should be read from a config file.
    mInfo.width = 8;
    mInfo.height = 6;
    mInfo.center.isValid = true;
    mInfo.center.x = 0;
    mInfo.center.y = 0;

    if (mSvTexture->initCheck() == OK) {
        LOG(INFO) << "Successfully allocated Graphic Buffer";
    } else {
        LOG(ERROR) << "Failed to allocate Graphic Buffer";
        return false;
    }

    if (mSurroundView->Start2dPipeline()) {
        LOG(INFO) << "Start2dPipeline succeeded";
    } else {
        LOG(ERROR) << "Start2dPipeline failed";
        return false;
    }

    mIsInitialized = true;
    return true;
}

}  // namespace implementation
}  // namespace V1_0
}  // namespace sv
}  // namespace automotive
}  // namespace hardware
}  // namespace android