/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/YUVImage.h>
#include <media/stagefright/YUVCanvas.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <ui/Rect.h>
#include <utils/String8.h>
#include <utils/Vector.h>
#include "OMX_Video.h"

namespace android {

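// Factory that connects to the default camera (id 0) and wraps it in a
// time-lapse source. Returns NULL if the camera cannot be connected.
// Illustrative call (parameter values are examples only): capture a still
// frame every 5 seconds and play it back at 30 fps:
//     CameraSourceTimeLapse *source = CameraSourceTimeLapse::Create(
//             true /* useStillCameraForTimeLapse */,
//             5000000LL /* timeBetweenTimeLapseFrameCaptureUs */,
//             1280, 720, 30 /* videoFrameRate */);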
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::Create(bool useStillCameraForTimeLapse,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate) {
    sp<Camera> camera = Camera::connect(0);

    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
            timeBetweenTimeLapseFrameCaptureUs, width, height, videoFrameRate);
}

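// Factory variant that reuses an already-connected Camera instance instead of
// opening one here. Returns NULL if the supplied camera is NULL.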
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera,
        bool useStillCameraForTimeLapse,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate) {
    if (camera.get() == NULL) {
        return NULL;
    }

    return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse,
            timeBetweenTimeLapseFrameCaptureUs, width, height, videoFrameRate);
}

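// In still-camera mode the hardware picture size rarely matches the requested
// video size exactly, so the constructor picks the closest supported picture
// size and decides whether captured frames will need center-cropping.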
CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera,
        bool useStillCameraForTimeLapse,
        int64_t timeBetweenTimeLapseFrameCaptureUs,
        int32_t width, int32_t height,
        int32_t videoFrameRate)
    : CameraSource(camera),
      mUseStillCameraForTimeLapse(useStillCameraForTimeLapse),
      mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    LOGV("starting time lapse mode");
    mVideoWidth = width;
    mVideoHeight = height;
    if (mUseStillCameraForTimeLapse) {
        CHECK(setPictureSizeToClosestSupported(width, height));
        mNeedCropping = computeCropRectangleOffset();
        mMeta->setInt32(kKeyWidth, width);
        mMeta->setInt32(kKeyHeight, height);
    }
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}

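// Scans the camera's supported still-picture sizes and remembers the smallest
// one (by pixel count) that is at least as large as the requested video size.
// Returns false if no supported size can cover the requested dimensions.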
bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();
    IPCThreadState::self()->restoreCallingIdentity(token);

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedPictureSizes(supportedSizes);

    int32_t minPictureSize = INT_MAX;
    for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth >= width) && (pictureHeight >= height)) {
            int32_t pictureSize = pictureWidth*pictureHeight;
            if (pictureSize < minPictureSize) {
                minPictureSize = pictureSize;
                mPictureWidth = pictureWidth;
                mPictureHeight = pictureHeight;
            }
        }
    }
    LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
    return (minPictureSize != INT_MAX);
}

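// Computes the top-left corner of a video-sized crop window centered inside
// the (larger) still picture. Returns true if cropping is needed, i.e. the
// chosen picture size differs from the requested video size.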
bool CameraSourceTimeLapse::computeCropRectangleOffset() {
    if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
        return false;
    }

    CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));

    int32_t widthDifference = mPictureWidth - mVideoWidth;
    int32_t heightDifference = mPictureHeight - mVideoHeight;

    mCropRectStartX = widthDifference/2;
    mCropRectStartY = heightDifference/2;

    LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);

    return true;
}

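// Still-camera capture loop, run on its own thread: whenever the camera is
// idle it issues takePicture() and then waits out the capture interval;
// otherwise it backs off briefly and re-checks.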
// static
void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadTimeLapseEntry();
    return NULL;
}

void CameraSourceTimeLapse::threadTimeLapseEntry() {
    while (mStarted) {
        if (mCameraIdle) {
            LOGV("threadTimeLapseEntry: taking picture");
            CHECK_EQ(OK, mCamera->takePicture());
            mCameraIdle = false;
            // Wait out the capture interval (truncated to whole seconds).
            sleep(mTimeBetweenTimeLapseFrameCaptureUs/1E6);
        } else {
            LOGV("threadTimeLapseEntry: camera busy with old takePicture. Sleeping a little.");
            // The previous takePicture has not completed yet; back off ~10 ms
            // before checking again.
            usleep(10000);
        }
    }
}

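// In still-camera mode this configures the still-picture size, marks the
// camera idle and spawns the joinable capture thread; in video-camera mode it
// simply starts normal recording.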
void CameraSourceTimeLapse::startCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        LOGV("start time lapse recording using still camera");

        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        String8 s = mCamera->getParameters();
        IPCThreadState::self()->restoreCallingIdentity(token);

        CameraParameters params(s);
        params.setPictureSize(mPictureWidth, mPictureHeight);
        mCamera->setParameters(params.flatten());
        mCameraIdle = true;

        // create a thread which takes pictures in a loop
        pthread_attr_t attr;
        pthread_attr_init(&attr);
        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);

        pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
        pthread_attr_destroy(&attr);
    } else {
        LOGV("start time lapse recording using video camera");
        CHECK_EQ(OK, mCamera->startRecording());
    }
}

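// Stops recording: waits for the capture thread to exit in still-camera mode,
// or stops the camera's video recording otherwise.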
void CameraSourceTimeLapse::stopCameraRecording() {
    if (mUseStillCameraForTimeLapse) {
        void *dummy;
        pthread_join(mThreadTimeLapse, &dummy);
    } else {
        mCamera->stopRecording();
    }
}

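// Recording frames only need to be handed back to the camera in video-camera
// mode; in still-camera mode the frames passed downstream are locally owned
// copies.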
void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
    if (!mUseStillCameraForTimeLapse) {
        mCamera->releaseRecordingFrame(frame);
    }
}

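// Makes a deep copy of the callback data into freshly allocated memory so the
// frame can be passed downstream independently of the camera's own buffer.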
sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
    size_t source_size = source_data->size();
    void* source_pointer = source_data->pointer();

    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
    memcpy(newMemory->pointer(), source_pointer, source_size);
    return newMemory;
}

// Allocates IMemory of final type MemoryBase with the given size.
sp<IMemory> allocateIMemory(size_t size) {
    sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
    sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
    return newMemory;
}

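// After each takePicture() the preview must be restarted before the next
// capture. That is done on a detached thread so the camera data callback can
// return immediately.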
// static
void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
    CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
    source->threadStartPreview();
    return NULL;
}

void CameraSourceTimeLapse::threadStartPreview() {
    CHECK_EQ(OK, mCamera->startPreview());
    mCameraIdle = true;
}

void CameraSourceTimeLapse::restartPreview() {
    // Start this in a different thread, so that the dataCallback can return
    LOGV("restartPreview");
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);

    pthread_t threadPreview;
    pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
    pthread_attr_destroy(&attr);
}

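// Copies the centered mVideoWidth x mVideoHeight window out of the full-size
// still picture into a newly allocated buffer and returns that buffer.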
sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
    // find the YUV format
    int32_t srcFormat;
    CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
    YUVImage::YUVFormat yuvFormat;
    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
        yuvFormat = YUVImage::YUV420SemiPlanar;
    } else {
        // Only the two YUV420 layouts are handled; fail loudly on anything
        // else rather than leaving yuvFormat uninitialized.
        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
        yuvFormat = YUVImage::YUV420Planar;
    }

    // allocate memory for cropped image and setup a canvas using it.
    sp<IMemory> croppedImageMemory = allocateIMemory(
            YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
    YUVImage yuvImageCropped(yuvFormat,
            mVideoWidth, mVideoHeight,
            (uint8_t *)croppedImageMemory->pointer());
    YUVCanvas yuvCanvasCrop(yuvImageCropped);

    YUVImage yuvImageSource(yuvFormat,
            mPictureWidth, mPictureHeight,
            (uint8_t *)source_data->pointer());
    yuvCanvasCrop.CopyImageRect(
            Rect(mCropRectStartX, mCropRectStartY,
                mCropRectStartX + mVideoWidth,
                mCropRectStartY + mVideoHeight),
            0, 0,
            yuvImageSource);

    return croppedImageMemory;
}

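// Callback for still-camera captures. The COMPRESSED_IMAGE message marks the
// end of a takePicture() cycle and only triggers a preview restart; RAW_IMAGE
// frames are timestamped at the time-lapse video frame rate and forwarded
// (cropped or copied) to dataCallbackTimestamp.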
void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
    if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
        // takePicture will complete after this callback, so restart preview.
        restartPreview();
        return;
    }
    if (msgType != CAMERA_MSG_RAW_IMAGE) {
        return;
    }

    LOGV("dataCallback for timelapse still frame");
    CHECK_EQ(true, mUseStillCameraForTimeLapse);

    int64_t timestampUs;
    if (mNumFramesReceived == 0) {
        timestampUs = mStartTimeUs;
    } else {
        timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    }

    if (mNeedCropping) {
        sp<IMemory> croppedImageData = cropYUVImage(data);
        dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
    } else {
        sp<IMemory> dataCopy = createIMemoryCopy(data);
        dataCallbackTimestamp(timestampUs, msgType, dataCopy);
    }
}

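// Returns true (and clears the flag) when dataCallbackTimestamp decided the
// current frame should be dropped; otherwise returns false. The timestamp
// argument is unused here.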
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

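// Video-camera mode only: decides whether an incoming frame arrived before the
// capture interval elapsed (and should be skipped), and rewrites the
// timestamps of kept frames so consecutive encoded frames are exactly one
// video frame duration apart.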
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (!mUseStillCameraForTimeLapse) {
        if (mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        } else if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            //   of the last encoded frame's time stamp.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        }
    }
    return false;
}

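// Callback for video-camera (recording) frames: applies the skip/retimestamp
// policy above before handing the frame to the base CameraSource.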
void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
        const sp<IMemory> &data) {
    if (!mUseStillCameraForTimeLapse) {
        mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    }
    CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
}

} // namespace android