/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
#include "OMX_Video.h"
#include <limits.h>

namespace android {

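// Connects to the default camera (id 0) and returns a new time-lapse source,
// or NULL if the camera cannot be connected.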
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::Create(
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate) {
    sp<Camera> camera = Camera::connect(0);

    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSourceTimeLapse(camera, timeBetweenTimeLapseFrameCaptureUs,
            width, height, videoFrameRate);
}

// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate) {
    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSourceTimeLapse(camera, timeBetweenTimeLapseFrameCaptureUs,
            width, height, videoFrameRate);
}

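// The constructor chooses between two capture paths: if the camera supports a
// preview size equal to the requested video size, frames come from the video
// (preview) path; otherwise still pictures are taken and cropped down to the
// video size.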
CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate)
    : CameraSource(camera),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGV("starting time lapse mode");
    mVideoWidth = width;
    mVideoHeight = height;

    if (trySettingPreviewSize(width, height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the fastest rate at which the still camera can take pictures.
        mUseStillCameraForTimeLapse = true;
        CHECK(setPictureSizeToClosestSupported(width, height));
        mNeedCropping = computeCropRectangleOffset();
        mMeta->setInt32(kKeyWidth, width);
        mMeta->setInt32(kKeyHeight, height);
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}

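// Puts the source into "quick read returns" mode: read() will keep handing out
// a copy of the last captured frame so that a pending stop does not block on
// the next time-lapse capture.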
void CameraSourceTimeLapse::startQuickReadReturns() {
    Mutex::Autolock autoLock(mQuickStopLock);
    LOGV("Enabling quick read returns");

    // Enable quick stop mode.
    mQuickStop = true;

    if (mUseStillCameraForTimeLapse) {
        // wake up the thread right away.
        mTakePictureCondition.signal();
    } else {
        // Force dataCallbackTimestamp() coming from the video camera to not skip the
        // next frame as we want read() to get a frame right away.
        mForceRead = true;
    }
}

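// Returns true and applies the preview size if (width, height) is one of the
// camera's supported preview sizes; returns false otherwise.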
bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPreviewSizes(supportedSizes);

    bool previewSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            previewSizeSupported = true;
        }
    }

    if (previewSizeSupported) {
        LOGV("Video size (%d, %d) is a supported preview size", width, height);
        params.setPreviewSize(width, height);
        CHECK(mCamera->setParameters(params.flatten()));
        return true;
    }

    return false;
}

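// Picks the smallest supported still-picture size that is at least
// (width x height) and records it in mPictureWidth/mPictureHeight.
// Returns false if no supported picture size is large enough.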
bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPictureSizes(supportedSizes);

    int32_t minPictureSize = INT_MAX;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth >= width) && (pictureHeight >= height)) {
            int32_t pictureSize = pictureWidth*pictureHeight;
            if (pictureSize < minPictureSize) {
                minPictureSize = pictureSize;
                mPictureWidth = pictureWidth;
                mPictureHeight = pictureHeight;
            }
        }
    }
    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
    return (minPictureSize != INT_MAX);
}

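// Computes the top-left corner of a centered (mVideoWidth x mVideoHeight)
// crop rectangle inside the (mPictureWidth x mPictureHeight) still picture.
// Returns true if cropping is actually needed.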
bool CameraSourceTimeLapse::computeCropRectangleOffset() {
    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
        return false;
    }

    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));

    int32_t widthDifference = mPictureWidth - mVideoWidth;
    int32_t heightDifference = mPictureHeight - mVideoHeight;

    mCropRectStartX = widthDifference/2;
    mCropRectStartY = heightDifference/2;

    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);

    return true;
}

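// In quick-stop mode the buffer handed out by read() is a private copy owned
// by this class; release it here instead of returning it to CameraSource.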
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

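// Deep-copies sourceBuffer into a newly allocated MediaBuffer and stamps the
// copy with frameTime as its kKeyTime metadata.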
void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

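// Keeps a referenced copy of the most recently read buffer so it can be
// handed out again while in quick-stop mode.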
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

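// Normal reads are delegated to CameraSource::read(). Once quick-stop has been
// requested, the saved copy of the last frame is returned on every subsequent
// call instead of waiting for a new capture.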
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked. Make a copy of
        // the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadTimeLapseEntry();
    return NULL;
}

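// Still-camera capture loop: waits for the camera to become idle, takes a
// picture, then waits mTimeBetweenTimeLapseFrameCaptureUs (or until woken up
// by a quick stop) before repeating.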
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        {
            Mutex::Autolock autoLock(mCameraIdleLock);
            if (!mCameraIdle) {
                mCameraIdleCondition.wait(mCameraIdleLock);
            }
            CHECK(mCameraIdle);
            mCameraIdle = false;
        }

        // Even if mQuickStop == true we need to take one more picture
        // as a read() may be blocked, waiting for a frame to become available.
        // After this takePicture, if mQuickStop == true, we can safely exit
        // this thread as read() will make a copy of this last frame and keep
        // returning it in the quick stop mode.
        Mutex::Autolock autoLock(mQuickStopLock);
        CHECK_EQ(OK, mCamera->takePicture());
        if (mQuickStop) {
            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
            return;
        }
        mTakePictureCondition.waitRelative(mQuickStopLock,
                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
    }
    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
}

void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();
        IPCThreadState::self()->restoreCallingIdentity(token);

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;
        mStopWaitingForIdleCamera = false;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}

void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // Last takePicture may still be underway. Wait for the camera to get
        // idle.
        Mutex::Autolock autoLock(mCameraIdleLock);
        mStopWaitingForIdleCamera = true;
        if (!mCameraIdle) {
            mCameraIdleCondition.wait(mCameraIdleLock);
        }
        CHECK(mCameraIdle);
        mCamera->setListener(NULL);

        // play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

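// Recording frames only need to be handed back to the camera when the video
// (preview) path is used; still-camera captures have no frame to return.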
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (!mUseStillCameraForTimeLapse) {
        mCamera->releaseRecordingFrame(frame);
    }
}

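// Deep-copies the given IMemory into a freshly allocated MemoryBase of the
// same size.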
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

// Allocates IMemory of final type MemoryBase with the given size.
sp<IMemory> allocateIMemory(size_t size) {
    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
    return newMemory;
}

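// The preview is restarted on a detached worker thread (see restartPreview()
// below) so that dataCallback() can return; once the preview is running again
// the camera is marked idle and the capture loop can take the next picture.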
// static
void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadStartPreview();
    return NULL;
}

void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    Mutex::Autolock autoLock(mCameraIdleLock);
    mCameraIdle = true;
    mCameraIdleCondition.signal();
}

void CameraSourceTimeLapse::restartPreview() {
    // Start this in a different thread, so that the dataCallback can return
    LOGV("restartPreview");
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

    pthread_t threadPreview;
    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
    pthread_attr_destroy(&attr);
}

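// Crops the centered (mVideoWidth x mVideoHeight) region out of the full-size
// still picture and returns the cropped image in newly allocated memory.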
sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
    // find the YUV format
    int32_t srcFormat;
    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
    YUVImage::YUVFormat yuvFormat;
    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        yuvFormat = YUVImage::YUV420SemiPlanar;
    } else {
        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
        yuvFormat = YUVImage::YUV420Planar;
    }

    // allocate memory for cropped image and setup a canvas using it.
    sp<IMemory> croppedImageMemory = allocateIMemory(
            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
    YUVImage yuvImageCropped(yuvFormat,
            mVideoWidth, mVideoHeight,
            (uint8_t *)croppedImageMemory->pointer());
    YUVCanvas yuvCanvasCrop(yuvImageCropped);

    YUVImage yuvImageSource(yuvFormat,
            mPictureWidth, mPictureHeight,
            (uint8_t *)source_data->pointer());
    yuvCanvasCrop.CopyImageRect(
            Rect(mCropRectStartX, mCropRectStartY,
                mCropRectStartX + mVideoWidth,
                mCropRectStartY + mVideoHeight),
            0, 0,
            yuvImageSource);

    return croppedImageMemory;
}

void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}

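// Returns true exactly once after skipFrameAndModifyTimeStamp() has flagged
// the current frame for dropping, then clears the flag.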
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            return false;
        }

        {
            Mutex::Autolock autoLock(mQuickStopLock);

            // mForceRead may be set to true by startQuickReadReturns(). In that
            // case don't skip this frame.
            if (mForceRead) {
                LOGV("dataCallbackTimestamp timelapse: forced read");
                mForceRead = false;
                *timestampUs = mLastFrameTimestampUs;
                return false;
            }
        }

        if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
            return false;
        }
    }
    return false;
}

void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
        const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    } else {
        Mutex::Autolock autoLock(mCameraIdleLock);
        // If we are using the still camera and stop() has been called, it may
        // be waiting for the camera to get idle. In that case return
        // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
        // to a deadlock since it tries to access CameraSource::mLock which in
        // this case is held by CameraSource::stop() currently waiting for the
        // camera to get idle. And camera will not get idle until this call
        // returns.
        if (mStopWaitingForIdleCamera) {
            return;
        }
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

}  // namespace android