Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2010 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | //#define LOG_NDEBUG 0 |
| 18 | #define LOG_TAG "CameraSourceTimeLapse" |
| 19 | |
| 20 | #include <binder/IPCThreadState.h> |
| 21 | #include <binder/MemoryBase.h> |
| 22 | #include <binder/MemoryHeapBase.h> |
| 23 | #include <media/stagefright/CameraSource.h> |
| 24 | #include <media/stagefright/CameraSourceTimeLapse.h> |
| 25 | #include <media/stagefright/MediaDebug.h> |
| 26 | #include <media/stagefright/MetaData.h> |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 27 | #include <media/stagefright/YUVImage.h> |
| 28 | #include <media/stagefright/YUVCanvas.h> |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 29 | #include <camera/Camera.h> |
| 30 | #include <camera/CameraParameters.h> |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 31 | #include <ui/Rect.h> |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 32 | #include <utils/String8.h> |
Nipun Kwatra | b1fb607 | 2010-07-30 18:30:55 -0700 | [diff] [blame] | 33 | #include <utils/Vector.h> |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 34 | #include "OMX_Video.h" |
Nipun Kwatra | 7d435c5 | 2010-08-02 11:30:06 -0700 | [diff] [blame] | 35 | #include <limits.h> |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 36 | |
| 37 | namespace android { |
| 38 | |
| 39 | // static |
Nipun Kwatra | 4a857e6 | 2010-09-02 11:43:15 -0700 | [diff] [blame] | 40 | CameraSourceTimeLapse *CameraSourceTimeLapse::Create( |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 41 | int64_t timeBetweenTimeLapseFrameCaptureUs, |
Nipun Kwatra | d01371b | 2010-07-20 21:33:31 -0700 | [diff] [blame] | 42 | int32_t width, int32_t height, |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 43 | int32_t videoFrameRate) { |
| 44 | sp<Camera> camera = Camera::connect(0); |
| 45 | |
| 46 | if (camera.get() == NULL) { |
| 47 | return NULL; |
| 48 | } |
| 49 | |
Nipun Kwatra | 4a857e6 | 2010-09-02 11:43:15 -0700 | [diff] [blame] | 50 | return new CameraSourceTimeLapse(camera, timeBetweenTimeLapseFrameCaptureUs, |
| 51 | width, height, videoFrameRate); |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 52 | } |
| 53 | |
| 54 | // static |
| 55 | CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera, |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 56 | int64_t timeBetweenTimeLapseFrameCaptureUs, |
Nipun Kwatra | d01371b | 2010-07-20 21:33:31 -0700 | [diff] [blame] | 57 | int32_t width, int32_t height, |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 58 | int32_t videoFrameRate) { |
| 59 | if (camera.get() == NULL) { |
| 60 | return NULL; |
| 61 | } |
| 62 | |
Nipun Kwatra | 4a857e6 | 2010-09-02 11:43:15 -0700 | [diff] [blame] | 63 | return new CameraSourceTimeLapse(camera, timeBetweenTimeLapseFrameCaptureUs, |
| 64 | width, height, videoFrameRate); |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 65 | } |
| 66 | |
| 67 | CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera, |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 68 | int64_t timeBetweenTimeLapseFrameCaptureUs, |
Nipun Kwatra | d01371b | 2010-07-20 21:33:31 -0700 | [diff] [blame] | 69 | int32_t width, int32_t height, |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 70 | int32_t videoFrameRate) |
| 71 | : CameraSource(camera), |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 72 | mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs), |
| 73 | mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate), |
| 74 | mLastTimeLapseFrameRealTimestampUs(0), |
| 75 | mSkipCurrentFrame(false) { |
| 76 | |
| 77 | LOGV("starting time lapse mode"); |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 78 | mVideoWidth = width; |
| 79 | mVideoHeight = height; |
Nipun Kwatra | 4a857e6 | 2010-09-02 11:43:15 -0700 | [diff] [blame] | 80 | |
| 81 | if (trySettingPreviewSize(width, height)) { |
| 82 | mUseStillCameraForTimeLapse = false; |
| 83 | } else { |
| 84 | // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater |
| 85 | // than the fastest rate at which the still camera can take pictures. |
| 86 | mUseStillCameraForTimeLapse = true; |
Nipun Kwatra | b1fb607 | 2010-07-30 18:30:55 -0700 | [diff] [blame] | 87 | CHECK(setPictureSizeToClosestSupported(width, height)); |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 88 | mNeedCropping = computeCropRectangleOffset(); |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 89 | mMeta->setInt32(kKeyWidth, width); |
| 90 | mMeta->setInt32(kKeyHeight, height); |
| 91 | } |
| 92 | } |
| 93 | |
// Nothing to tear down here beyond what the CameraSource destructor handles.
CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
| 96 | |
Nipun Kwatra | 4a857e6 | 2010-09-02 11:43:15 -0700 | [diff] [blame] | 97 | bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) { |
| 98 | int64_t token = IPCThreadState::self()->clearCallingIdentity(); |
| 99 | String8 s = mCamera->getParameters(); |
| 100 | IPCThreadState::self()->restoreCallingIdentity(token); |
| 101 | |
| 102 | CameraParameters params(s); |
| 103 | Vector<Size> supportedSizes; |
| 104 | params.getSupportedPreviewSizes(supportedSizes); |
| 105 | |
| 106 | bool previewSizeSupported = false; |
| 107 | for (uint32_t i = 0; i < supportedSizes.size(); ++i) { |
| 108 | int32_t pictureWidth = supportedSizes[i].width; |
| 109 | int32_t pictureHeight = supportedSizes[i].height; |
| 110 | |
| 111 | if ((pictureWidth == width) && (pictureHeight == height)) { |
| 112 | previewSizeSupported = true; |
| 113 | } |
| 114 | } |
| 115 | |
| 116 | if (previewSizeSupported) { |
| 117 | LOGV("Video size (%d, %d) is a supported preview size", width, height); |
| 118 | params.setPreviewSize(width, height); |
| 119 | CHECK(mCamera->setParameters(params.flatten())); |
| 120 | return true; |
| 121 | } |
| 122 | |
| 123 | return false; |
| 124 | } |
| 125 | |
Nipun Kwatra | b1fb607 | 2010-07-30 18:30:55 -0700 | [diff] [blame] | 126 | bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) { |
| 127 | int64_t token = IPCThreadState::self()->clearCallingIdentity(); |
| 128 | String8 s = mCamera->getParameters(); |
| 129 | IPCThreadState::self()->restoreCallingIdentity(token); |
| 130 | |
| 131 | CameraParameters params(s); |
| 132 | Vector<Size> supportedSizes; |
| 133 | params.getSupportedPictureSizes(supportedSizes); |
| 134 | |
| 135 | int32_t minPictureSize = INT_MAX; |
| 136 | for (uint32_t i = 0; i < supportedSizes.size(); ++i) { |
| 137 | int32_t pictureWidth = supportedSizes[i].width; |
| 138 | int32_t pictureHeight = supportedSizes[i].height; |
| 139 | |
| 140 | if ((pictureWidth >= width) && (pictureHeight >= height)) { |
| 141 | int32_t pictureSize = pictureWidth*pictureHeight; |
| 142 | if (pictureSize < minPictureSize) { |
| 143 | minPictureSize = pictureSize; |
| 144 | mPictureWidth = pictureWidth; |
| 145 | mPictureHeight = pictureHeight; |
| 146 | } |
| 147 | } |
| 148 | } |
| 149 | LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight); |
| 150 | return (minPictureSize != INT_MAX); |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 151 | } |
| 152 | |
| 153 | bool CameraSourceTimeLapse::computeCropRectangleOffset() { |
| 154 | if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) { |
| 155 | return false; |
| 156 | } |
| 157 | |
| 158 | CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight)); |
| 159 | |
| 160 | int32_t widthDifference = mPictureWidth - mVideoWidth; |
| 161 | int32_t heightDifference = mPictureHeight - mVideoHeight; |
| 162 | |
| 163 | mCropRectStartX = widthDifference/2; |
| 164 | mCropRectStartY = heightDifference/2; |
| 165 | |
| 166 | LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY); |
| 167 | |
| 168 | return true; |
| 169 | } |
| 170 | |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 171 | // static |
| 172 | void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) { |
| 173 | CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); |
| 174 | source->threadTimeLapseEntry(); |
| 175 | return NULL; |
| 176 | } |
| 177 | |
// Body of the still-capture thread: while recording is active, repeatedly
// trigger takePicture() and then sleep for the configured time-lapse
// interval; if the previous capture has not finished, poll briefly instead.
void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while(mStarted) {
        if (mCameraIdle) {
            LOGV("threadTimeLapseEntry: taking picture");
            CHECK_EQ(OK, mCamera->takePicture());
            // Cleared here; set true again once the post-capture preview
            // restart completes (threadStartPreview).
            mCameraIdle = false;
            // NOTE(review): usleep() takes a useconds_t (typically 32-bit),
            // so an interval above ~4294 seconds would be truncated — confirm
            // callers bound mTimeBetweenTimeLapseFrameCaptureUs accordingly.
            usleep(mTimeBetweenTimeLapseFrameCaptureUs);
        } else {
            LOGV("threadTimeLapseEntry: camera busy with old takePicture. Sleeping a little.");
            // Poll every 10 ms until the previous capture finishes.
            usleep(1E4);
        }
    }
}
| 191 | |
// Begins frame production. In still-camera mode this configures the chosen
// picture size, swaps the shutter sound for the recording sound, and spawns
// a joinable thread that loops takePicture(); in preview mode it simply
// starts the camera's recording stream.
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        // Fetch/set parameters with the binder calling identity cleared so
        // the camera service attributes the calls to this process.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();
        IPCThreadState::self()->restoreCallingIdentity(token);

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        // Camera is ready for the first takePicture() issued by the thread.
        mCameraIdle = true;

        // disable shutter sound and play the recording sound.
        mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
        mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        // Joinable so stopCameraRecording() can wait for it to exit.
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}
| 221 | |
| 222 | void CameraSourceTimeLapse::stopCameraRecording() { |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 223 | if (mUseStillCameraForTimeLapse) { |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 224 | void *dummy; |
| 225 | pthread_join(mThreadTimeLapse, &dummy); |
Nipun Kwatra | 8e02ca7 | 2010-09-14 21:22:59 -0700 | [diff] [blame] | 226 | |
| 227 | // play the recording sound and restart preview. |
| 228 | mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0); |
Nipun Kwatra | c44cf62 | 2010-08-10 17:54:11 -0700 | [diff] [blame] | 229 | CHECK_EQ(OK, mCamera->startPreview()); |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 230 | } else { |
| 231 | mCamera->stopRecording(); |
| 232 | } |
| 233 | } |
| 234 | |
| 235 | void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) { |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 236 | if (!mUseStillCameraForTimeLapse) { |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 237 | mCamera->releaseRecordingFrame(frame); |
| 238 | } |
| 239 | } |
| 240 | |
| 241 | sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) { |
| 242 | size_t source_size = source_data->size(); |
| 243 | void* source_pointer = source_data->pointer(); |
| 244 | |
| 245 | sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size); |
| 246 | sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size); |
| 247 | memcpy(newMemory->pointer(), source_pointer, source_size); |
| 248 | return newMemory; |
| 249 | } |
| 250 | |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 251 | // Allocates IMemory of final type MemoryBase with the given size. |
| 252 | sp<IMemory> allocateIMemory(size_t size) { |
| 253 | sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size); |
| 254 | sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size); |
| 255 | return newMemory; |
| 256 | } |
| 257 | |
Nipun Kwatra | 4cd8672 | 2010-07-18 15:52:02 -0700 | [diff] [blame] | 258 | // static |
| 259 | void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) { |
| 260 | CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); |
| 261 | source->threadStartPreview(); |
| 262 | return NULL; |
| 263 | } |
| 264 | |
// Runs on the preview-restart thread: brings the preview back up after a
// still capture, then marks the camera ready for the next takePicture().
void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    // Set only after the preview restart succeeds, so the capture loop does
    // not fire takePicture() against a camera that is still busy.
    mCameraIdle = true;
}
| 269 | |
| 270 | void CameraSourceTimeLapse::restartPreview() { |
| 271 | // Start this in a different thread, so that the dataCallback can return |
| 272 | LOGV("restartPreview"); |
| 273 | pthread_attr_t attr; |
| 274 | pthread_attr_init(&attr); |
| 275 | pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED); |
| 276 | |
| 277 | pthread_t threadPreview; |
| 278 | pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this); |
| 279 | pthread_attr_destroy(&attr); |
| 280 | } |
| 281 | |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 282 | sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) { |
| 283 | // find the YUV format |
| 284 | int32_t srcFormat; |
| 285 | CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat)); |
| 286 | YUVImage::YUVFormat yuvFormat; |
| 287 | if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) { |
| 288 | yuvFormat = YUVImage::YUV420SemiPlanar; |
James Dong | 5d1d920 | 2010-09-15 16:58:44 -0700 | [diff] [blame^] | 289 | } else { |
| 290 | CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar); |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 291 | yuvFormat = YUVImage::YUV420Planar; |
| 292 | } |
| 293 | |
| 294 | // allocate memory for cropped image and setup a canvas using it. |
| 295 | sp<IMemory> croppedImageMemory = allocateIMemory( |
| 296 | YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight)); |
| 297 | YUVImage yuvImageCropped(yuvFormat, |
| 298 | mVideoWidth, mVideoHeight, |
| 299 | (uint8_t *)croppedImageMemory->pointer()); |
| 300 | YUVCanvas yuvCanvasCrop(yuvImageCropped); |
| 301 | |
| 302 | YUVImage yuvImageSource(yuvFormat, |
| 303 | mPictureWidth, mPictureHeight, |
| 304 | (uint8_t *)source_data->pointer()); |
| 305 | yuvCanvasCrop.CopyImageRect( |
| 306 | Rect(mCropRectStartX, mCropRectStartY, |
| 307 | mCropRectStartX + mVideoWidth, |
| 308 | mCropRectStartY + mVideoHeight), |
| 309 | 0, 0, |
| 310 | yuvImageSource); |
| 311 | |
| 312 | return croppedImageMemory; |
| 313 | } |
| 314 | |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 315 | void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) { |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 316 | if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) { |
Nipun Kwatra | 4cd8672 | 2010-07-18 15:52:02 -0700 | [diff] [blame] | 317 | // takePicture will complete after this callback, so restart preview. |
| 318 | restartPreview(); |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 319 | return; |
Nipun Kwatra | 4cd8672 | 2010-07-18 15:52:02 -0700 | [diff] [blame] | 320 | } |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 321 | if (msgType != CAMERA_MSG_RAW_IMAGE) { |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 322 | return; |
| 323 | } |
| 324 | |
| 325 | LOGV("dataCallback for timelapse still frame"); |
| 326 | CHECK_EQ(true, mUseStillCameraForTimeLapse); |
| 327 | |
| 328 | int64_t timestampUs; |
| 329 | if (mNumFramesReceived == 0) { |
| 330 | timestampUs = mStartTimeUs; |
| 331 | } else { |
| 332 | timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; |
| 333 | } |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 334 | |
| 335 | if (mNeedCropping) { |
| 336 | sp<IMemory> croppedImageData = cropYUVImage(data); |
| 337 | dataCallbackTimestamp(timestampUs, msgType, croppedImageData); |
| 338 | } else { |
| 339 | sp<IMemory> dataCopy = createIMemoryCopy(data); |
| 340 | dataCallbackTimestamp(timestampUs, msgType, dataCopy); |
| 341 | } |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 342 | } |
| 343 | |
| 344 | bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 345 | if (mSkipCurrentFrame) { |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 346 | mSkipCurrentFrame = false; |
| 347 | return true; |
| 348 | } else { |
| 349 | return false; |
| 350 | } |
| 351 | } |
| 352 | |
| 353 | bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) { |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 354 | if (!mUseStillCameraForTimeLapse) { |
| 355 | if (mLastTimeLapseFrameRealTimestampUs == 0) { |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 356 | // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs |
| 357 | // to current time (timestampUs) and save frame data. |
| 358 | LOGV("dataCallbackTimestamp timelapse: initial frame"); |
| 359 | |
| 360 | mLastTimeLapseFrameRealTimestampUs = *timestampUs; |
| 361 | } else if (*timestampUs < |
| 362 | (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) { |
| 363 | // Skip all frames from last encoded frame until |
| 364 | // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed. |
| 365 | // Tell the camera to release its recording frame and return. |
| 366 | LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame"); |
| 367 | return true; |
| 368 | } else { |
| 369 | // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time: |
| 370 | // - Reset mLastTimeLapseFrameRealTimestampUs to current time. |
| 371 | // - Artificially modify timestampUs to be one frame time (1/framerate) ahead |
| 372 | // of the last encoded frame's time stamp. |
| 373 | LOGV("dataCallbackTimestamp timelapse: got timelapse frame"); |
| 374 | |
| 375 | mLastTimeLapseFrameRealTimestampUs = *timestampUs; |
| 376 | *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; |
| 377 | } |
| 378 | } |
| 379 | return false; |
| 380 | } |
| 381 | |
| 382 | void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, |
| 383 | const sp<IMemory> &data) { |
Nipun Kwatra | dce4beb | 2010-07-27 22:21:44 -0700 | [diff] [blame] | 384 | if (!mUseStillCameraForTimeLapse) { |
Nipun Kwatra | f9b8018 | 2010-07-12 09:17:14 -0700 | [diff] [blame] | 385 | mSkipCurrentFrame = skipFrameAndModifyTimeStamp(×tampUs); |
| 386 | } |
| 387 | CameraSource::dataCallbackTimestamp(timestampUs, msgType, data); |
| 388 | } |
| 389 | |
| 390 | } // namespace android |