/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
#include "OMX_Video.h"
#include <limits.h>

namespace android {

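// Illustrative usage sketch (hypothetical values, not taken from this file):
//
//   CameraSourceTimeLapse *source = CameraSourceTimeLapse::CreateFromCamera(
//           camera, cameraId, videoSize, 30 /* videoFrameRate */, surface,
//           1000000 /* capture one frame per second */);
//   if (source == NULL) {
//       // Initialization failed; see CreateFromCamera() below.
//   }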
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<ICamera> &camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, cameraId,
                videoSize, videoFrameRate, surface,
                timeBetweenTimeLapseFrameCaptureUs);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

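// The constructor first tries to configure the camera to output frames at the
// requested video size (see trySettingVideoSize()). If that succeeds, the
// regular video camera path is used and frames are skipped/retimed in
// dataCallbackTimestamp(). Otherwise the still camera is used: a picture size
// at least as large as the video size is selected and each captured picture
// is center-cropped down to the video size.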
CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<ICamera>& camera,
        int32_t cameraId,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<Surface>& surface,
        int64_t timeBetweenTimeLapseFrameCaptureUs)
    : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, true),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs);
    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (trySettingVideoSize(videoSize.width, videoSize.height)) {
        mUseStillCameraForTimeLapse = false;
    } else {
        // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
        // than the fastest rate at which the still camera can take pictures.
        mUseStillCameraForTimeLapse = true;
        CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
        mNeedCropping = computeCropRectangleOffset();
        mMeta->setInt32(kKeyWidth, videoSize.width);
        mMeta->setInt32(kKeyHeight, videoSize.height);
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}

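// Enables "quick stop" mode: subsequent read() calls keep returning a copy of
// the last frame read instead of blocking on new camera frames, so that
// stop() can complete quickly.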
void CameraSourceTimeLapse::startQuickReadReturns() {
    Mutex::Autolock autoLock(mQuickStopLock);
    LOGV("Enabling quick read returns");

    // Enable quick stop mode.
    mQuickStop = true;

    if (mUseStillCameraForTimeLapse) {
        // wake up the thread right away.
        mTakePictureCondition.signal();
    } else {
        // Force dataCallbackTimestamp() coming from the video camera to not skip
        // the next frame as we want read() to get a frame right away.
        mForceRead = true;
    }
}

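// Returns true if the camera can be configured to output frames at the
// requested size, either through a dedicated video size or, failing that,
// through the preview size. Returns false if the size is unsupported or the
// parameters could not be applied.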
bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) {
    LOGV("trySettingVideoSize: %dx%d", width, height);
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        LOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            LOGE("Failed to set preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

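// Picks the supported picture size with the smallest area that is at least as
// wide and as tall as the requested size, and stores it in
// mPictureWidth/mPictureHeight. Returns false if no such size exists.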
bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
    LOGV("setPictureSizeToClosestSupported: %dx%d", width, height);
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPictureSizes(supportedSizes);

    int32_t minPictureSize = INT_MAX;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth >= width) && (pictureHeight >= height)) {
            int32_t pictureSize = pictureWidth*pictureHeight;
            if (pictureSize < minPictureSize) {
                minPictureSize = pictureSize;
                mPictureWidth = pictureWidth;
                mPictureHeight = pictureHeight;
            }
        }
    }
    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
    return (minPictureSize != INT_MAX);
}

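// Computes the top-left offset of a video-sized rectangle centered inside the
// still picture. Returns true if cropping is needed, i.e. the picture size
// differs from the video size.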
bool CameraSourceTimeLapse::computeCropRectangleOffset() {
    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
        return false;
    }

    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));

    int32_t widthDifference = mPictureWidth - mVideoWidth;
    int32_t heightDifference = mPictureHeight - mVideoHeight;

    mCropRectStartX = widthDifference/2;
    mCropRectStartY = heightDifference/2;

    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);

    return true;
}

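// In quick stop mode the copy of the last read frame is owned by this class
// rather than by CameraSource, so it is released here instead of being handed
// back to the base class.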
void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
}

void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

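// Until quick stop is requested this simply forwards to CameraSource::read().
// Once mQuickStop is set, the first frame read afterwards is copied into
// mLastReadBufferCopy and every following read() returns that same copy.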
status_t CameraSourceTimeLapse::read(
        MediaBuffer **buffer, const ReadOptions *options) {
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked. Make a copy of
        // the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadTimeLapseEntry();
    return NULL;
}

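// Still-camera capture loop: wait until the camera is idle, take a picture,
// then sleep for mTimeBetweenTimeLapseFrameCaptureUs (or until woken up by
// startQuickReadReturns()) before taking the next one.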
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        {
            Mutex::Autolock autoLock(mCameraIdleLock);
            if (!mCameraIdle) {
                mCameraIdleCondition.wait(mCameraIdleLock);
            }
            CHECK(mCameraIdle);
            mCameraIdle = false;
        }

        // Even if mQuickStop == true we need to take one more picture
        // as a read() may be blocked, waiting for a frame to get available.
        // After this takePicture, if mQuickStop == true, we can safely exit
        // this thread as read() will make a copy of this last frame and keep
        // returning it in the quick stop mode.
        Mutex::Autolock autoLock(mQuickStopLock);
        CHECK_EQ(OK, mCamera->takePicture(CAMERA_MSG_RAW_IMAGE));
        if (mQuickStop) {
            LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
            return;
        }
        mTakePictureCondition.waitRelative(mQuickStopLock,
                mTimeBetweenTimeLapseFrameCaptureUs * 1000);
    }
    LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
}

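// In still-camera mode this configures the picture size, silences the shutter
// sound, and spawns the picture-taking thread; otherwise it simply starts
// normal video recording.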
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;
        mStopWaitingForIdleCamera = false;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
        IPCThreadState::self()->restoreCallingIdentity(token);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}

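// In still-camera mode this joins the picture-taking thread and waits for the
// last takePicture() to finish before detaching the listener; otherwise it
// stops normal video recording. Any quick-stop buffer copy is released.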
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);

        // Last takePicture may still be underway. Wait for the camera to get
        // idle.
        Mutex::Autolock autoLock(mCameraIdleLock);
        mStopWaitingForIdleCamera = true;
        if (!mCameraIdle) {
            mCameraIdleCondition.wait(mCameraIdleLock);
        }
        CHECK(mCameraIdle);
        mCamera->setListener(NULL);

        // play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
    } else {
        mCamera->setListener(NULL);
        mCamera->stopRecording();
    }
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (!mUseStillCameraForTimeLapse &&
        mCamera != NULL) {
        mCamera->releaseRecordingFrame(frame);
    }
}

sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

// Allocates IMemory of final type MemoryBase with the given size.
sp<IMemory> allocateIMemory(size_t size) {
    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
    return newMemory;
}

// static
void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadStartPreview();
    return NULL;
}

void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    Mutex::Autolock autoLock(mCameraIdleLock);
    mCameraIdle = true;
    mCameraIdleCondition.signal();
}

void CameraSourceTimeLapse::restartPreview() {
    // Start this in a different thread, so that the dataCallback can return
    LOGV("restartPreview");
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

    pthread_t threadPreview;
    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
    pthread_attr_destroy(&attr);
}

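// Center-crops a captured still picture (mPictureWidth x mPictureHeight) down
// to the video size using the offset computed in computeCropRectangleOffset(),
// returning the cropped frame in newly allocated IMemory.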
sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
    // find the YUV format
    int32_t srcFormat;
    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
    YUVImage::YUVFormat yuvFormat;
    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        yuvFormat = YUVImage::YUV420SemiPlanar;
    } else {
        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
        yuvFormat = YUVImage::YUV420Planar;
    }

    // allocate memory for cropped image and setup a canvas using it.
    sp<IMemory> croppedImageMemory = allocateIMemory(
            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
    YUVImage yuvImageCropped(yuvFormat,
            mVideoWidth, mVideoHeight,
            (uint8_t *)croppedImageMemory->pointer());
    YUVCanvas yuvCanvasCrop(yuvImageCropped);

    YUVImage yuvImageSource(yuvFormat,
            mPictureWidth, mPictureHeight,
            (uint8_t *)source_data->pointer());
    yuvCanvasCrop.CopyImageRect(
            Rect(mCropRectStartX, mCropRectStartY,
                mCropRectStartX + mVideoWidth,
                mCropRectStartY + mVideoHeight),
            0, 0,
            yuvImageSource);

    return croppedImageMemory;
}

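// Callback for still-camera captures: the raw image is timestamped and fed
// into dataCallbackTimestamp() (cropped first if needed), while the
// compressed-image notification is only used to restart the preview.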
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}

bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

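// Video-camera mode only: decides whether the current frame should be
// dropped. Frames arriving less than mTimeBetweenTimeLapseFrameCaptureUs
// after the last kept frame are skipped; a kept frame has its timestamp
// rewritten to be exactly one video-frame interval after the previous one,
// which is what compresses real time into time-lapse time.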
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            return false;
        }

        {
            Mutex::Autolock autoLock(mQuickStopLock);

            // mForceRead may be set to true by startQuickReadReturns(). In that
            // case don't skip this frame.
            if (mForceRead) {
                LOGV("dataCallbackTimestamp timelapse: forced read");
                mForceRead = false;
                *timestampUs = mLastFrameTimestampUs;
                return false;
            }
        }

        // Workaround to bypass the first 2 input frames for skipping.
        // The first 2 output frames from the encoder are: decoder specific info and
        // the compressed video frame data for the first input video frame.
        if (mNumFramesEncoded >= 1 && *timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            //   of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
            return false;
        }
    }
    return false;
}

void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
        const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    } else {
        Mutex::Autolock autoLock(mCameraIdleLock);
        // If we are using the still camera and stop() has been called, it may
        // be waiting for the camera to get idle. In that case return
        // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
        // to a deadlock since it tries to access CameraSource::mLock which in
        // this case is held by CameraSource::stop() currently waiting for the
        // camera to get idle. And camera will not get idle until this call
        // returns.
        if (mStopWaitingForIdleCamera) {
            return;
        }
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

}  // namespace android