/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
#include "OMX_Video.h"
#include <limits.h>

namespace android {

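// Factory method: constructs a CameraSourceTimeLapse and returns NULL if the
// underlying CameraSource fails its initCheck().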
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, cameraId,
                videoSize, videoFrameRate, surface,
                timeBetweenTimeLapseFrameCaptureUs);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

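// The constructor picks one of two capture paths: if the camera supports the
// requested video size as a preview size, frames are pulled from the video
// camera; otherwise still captures are used, taken at the closest larger
// picture size and center-cropped down to the video size.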
CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs)
    : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGV("starting time lapse mode");
    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (trySettingPreviewSize(videoSize.width, videoSize.height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the fastest rate at which the still camera can take pictures.
        mUseStillCameraForTimeLapse = true;
        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
        mNeedCropping = computeCropRectangleOffset();
        mMeta->setInt32(kKeyWidth, videoSize.width);
        mMeta->setInt32(kKeyHeight, videoSize.height);
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}

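// Puts the source into quick-stop mode: after this call, read() keeps
// returning a copy of the last frame it handed out instead of waiting for
// new time lapse frames.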
void CameraSourceTimeLapse::startQuickReadReturns() {
    Mutex::Autolock autoLock(mQuickStopLock);
    LOGV("Enabling quick read returns");

    // Enable quick stop mode.
    mQuickStop = true;

    if (mUseStillCameraForTimeLapse) {
        // Wake up the picture-taking thread right away.
        mTakePictureCondition.signal();
    } else {
        // Force dataCallbackTimestamp() coming from the video camera to not skip the
        // next frame, as we want read() to get a frame right away.
        mForceRead = true;
    }
}

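// Returns true and applies (width x height) as the preview size if the camera
// lists it among its supported preview sizes; returns false otherwise.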
bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPreviewSizes(supportedSizes);

    bool previewSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            previewSizeSupported = true;
        }
    }

    if (previewSizeSupported) {
        LOGV("Video size (%d, %d) is a supported preview size", width, height);
        params.setPreviewSize(width, height);
        CHECK(mCamera->setParameters(params.flatten()));
        return true;
    }

    return false;
}

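// Picks the smallest supported picture size that is at least as large as the
// requested video size and stores it in mPictureWidth/mPictureHeight.
// Returns false if no supported picture size is big enough.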
bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPictureSizes(supportedSizes);

    int32_t minPictureSize = INT_MAX;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth >= width) && (pictureHeight >= height)) {
            int32_t pictureSize = pictureWidth*pictureHeight;
            if (pictureSize < minPictureSize) {
                minPictureSize = pictureSize;
                mPictureWidth = pictureWidth;
                mPictureHeight = pictureHeight;
            }
        }
    }
    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
    return (minPictureSize != INT_MAX);
}

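// Computes the top-left offset of a video-sized crop rectangle centered in
// the still picture. Returns true if the picture needs cropping at all.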
bool CameraSourceTimeLapse::computeCropRectangleOffset() {
    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
        return false;
    }

    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));

    int32_t widthDifference = mPictureWidth - mVideoWidth;
    int32_t heightDifference = mPictureHeight - mVideoHeight;

    mCropRectStartX = widthDifference/2;
    mCropRectStartY = heightDifference/2;

    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);

    return true;
}

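// In quick-stop mode the cached last-read buffer belongs to this class, so it
// is released here directly; all other buffers go back to CameraSource.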
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

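// Deep-copies sourceBuffer into a newly allocated MediaBuffer and stamps the
// copy with the given frame time.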
void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

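// Caches a copy of the frame most recently returned by read(), so that it can
// be replayed for every read() issued while quick-stop mode is active.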
void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

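// Normal reads are delegated to CameraSource::read(). Once quick-stop mode is
// on, the cached copy of the last frame is returned for every call instead.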
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked. Make a copy of
        // the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadTimeLapseEntry();
    return NULL;
}

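// Still-camera capture loop: wait until the camera is idle, take a picture,
// then sleep for mTimeBetweenTimeLapseFrameCaptureUs (or until woken up by a
// quick stop) before taking the next one.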
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        {
            Mutex::Autolock autoLock(mCameraIdleLock);
            if (!mCameraIdle) {
                mCameraIdleCondition.wait(mCameraIdleLock);
            }
            CHECK(mCameraIdle);
            mCameraIdle = false;
        }

        // Even if mQuickStop == true we need to take one more picture,
        // as a read() may be blocked waiting for a frame to become available.
        // After this takePicture, if mQuickStop == true, we can safely exit
        // this thread as read() will make a copy of this last frame and keep
        // returning it in the quick stop mode.
        Mutex::Autolock autoLock(mQuickStopLock);
        CHECK_EQ(OK, mCamera->takePicture());
        if (mQuickStop) {
            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
            return;
        }
        mTakePictureCondition.waitRelative(mQuickStopLock,
                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
    }
    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
}

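// In still-camera mode this configures the picture size and spawns the
// picture-taking thread; in video-camera mode it simply starts recording.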
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();
        IPCThreadState::self()->restoreCallingIdentity(token);

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;
        mStopWaitingForIdleCamera = false;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}

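// Joins the picture-taking thread (still-camera mode), waits for any
// in-flight takePicture() to finish, and drops the cached quick-stop buffer.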
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // Last takePicture may still be underway. Wait for the camera to get
        // idle.
        Mutex::Autolock autoLock(mCameraIdleLock);
        mStopWaitingForIdleCamera = true;
        if (!mCameraIdle) {
            mCameraIdleCondition.wait(mCameraIdleLock);
        }
        CHECK(mCameraIdle);
        mCamera->setListener(NULL);

        // play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

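// Frames produced in still-camera mode are local copies, so only frames from
// the video camera need to be handed back.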
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (!mUseStillCameraForTimeLapse) {
        mCamera->releaseRecordingFrame(frame);
    }
}

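// Deep-copies an IMemory into a new MemoryBase backed by its own heap.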
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

// Allocates IMemory of final type MemoryBase with the given size.
sp<IMemory> allocateIMemory(size_t size) {
    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
    return newMemory;
}

// static
void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadStartPreview();
    return NULL;
}

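// Runs on its own thread: restarts the preview after a still capture and
// signals anyone waiting for the camera to become idle.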
void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    Mutex::Autolock autoLock(mCameraIdleLock);
    mCameraIdle = true;
    mCameraIdleCondition.signal();
}

void CameraSourceTimeLapse::restartPreview() {
    // Start this in a different thread, so that the dataCallback can return
    LOGV("restartPreview");
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

    pthread_t threadPreview;
    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
    pthread_attr_destroy(&attr);
}

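// Copies the centered, video-sized rectangle out of the full still picture
// into newly allocated memory using a YUVCanvas.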
sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
    // find the YUV format
    int32_t srcFormat;
    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
    YUVImage::YUVFormat yuvFormat;
    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        yuvFormat = YUVImage::YUV420SemiPlanar;
    } else {
        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
        yuvFormat = YUVImage::YUV420Planar;
    }

    // allocate memory for cropped image and setup a canvas using it.
    sp<IMemory> croppedImageMemory = allocateIMemory(
            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
    YUVImage yuvImageCropped(yuvFormat,
            mVideoWidth, mVideoHeight,
            (uint8_t *)croppedImageMemory->pointer());
    YUVCanvas yuvCanvasCrop(yuvImageCropped);

    YUVImage yuvImageSource(yuvFormat,
            mPictureWidth, mPictureHeight,
            (uint8_t *)source_data->pointer());
    yuvCanvasCrop.CopyImageRect(
            Rect(mCropRectStartX, mCropRectStartY,
                mCropRectStartX + mVideoWidth,
                mCropRectStartY + mVideoHeight),
            0, 0,
            yuvImageSource);

    return croppedImageMemory;
}

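// Still-camera callback: the compressed image only triggers a preview
// restart; raw frames are timestamped (evenly spaced by the video frame
// rate), cropped if needed, and forwarded to dataCallbackTimestamp().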
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}

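// Consumes the skip flag set by dataCallbackTimestamp(); the timestamp
// argument is currently unused.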
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

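// Video-camera mode only: returns true if the incoming frame arrived before
// mTimeBetweenTimeLapseFrameCaptureUs has elapsed and should be dropped.
// For frames that are kept, the timestamp is rewritten so that encoded
// frames end up exactly one video frame duration apart.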
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            return false;
        }

        {
            Mutex::Autolock autoLock(mQuickStopLock);

            // mForceRead may be set to true by startQuickReadReturns(). In that
            // case don't skip this frame.
            if (mForceRead) {
                LOGV("dataCallbackTimestamp timelapse: forced read");
                mForceRead = false;
                *timestampUs = mLastFrameTimestampUs;
                return false;
            }
        }

        if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            // of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
            return false;
        }
    }
    return false;
}

void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
        const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    } else {
        Mutex::Autolock autoLock(mCameraIdleLock);
        // If we are using the still camera and stop() has been called, it may
        // be waiting for the camera to get idle. In that case return
        // immediately. Calling CameraSource::dataCallbackTimestamp() would lead
        // to a deadlock: it tries to acquire CameraSource::mLock, which is held
        // by CameraSource::stop() while it waits for the camera to get idle,
        // and the camera will not get idle until this call returns.
        if (mStopWaitingForIdleCamera) {
            return;
        }
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

}  // namespace android