/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
| 16 | |
| 17 | //#define LOG_NDEBUG 0 |
| 18 | #define LOG_TAG "CameraSourceTimeLapse" |
| 19 | |
#include <pthread.h>
#include <unistd.h>

#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
| 30 | |
| 31 | namespace android { |
| 32 | |
| 33 | // static |
| 34 | CameraSourceTimeLapse *CameraSourceTimeLapse::Create(bool useStillCameraForTimeLapse, |
| 35 | int64_t timeBetweenTimeLapseFrameCaptureUs, |
| 36 | int32_t videoFrameRate) { |
| 37 | sp<Camera> camera = Camera::connect(0); |
| 38 | |
| 39 | if (camera.get() == NULL) { |
| 40 | return NULL; |
| 41 | } |
| 42 | |
| 43 | return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse, |
| 44 | timeBetweenTimeLapseFrameCaptureUs, videoFrameRate); |
| 45 | } |
| 46 | |
| 47 | // static |
| 48 | CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera, |
| 49 | bool useStillCameraForTimeLapse, |
| 50 | int64_t timeBetweenTimeLapseFrameCaptureUs, |
| 51 | int32_t videoFrameRate) { |
| 52 | if (camera.get() == NULL) { |
| 53 | return NULL; |
| 54 | } |
| 55 | |
| 56 | return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse, |
| 57 | timeBetweenTimeLapseFrameCaptureUs, videoFrameRate); |
| 58 | } |
| 59 | |
| 60 | CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera, |
| 61 | bool useStillCameraForTimeLapse, |
| 62 | int64_t timeBetweenTimeLapseFrameCaptureUs, |
| 63 | int32_t videoFrameRate) |
| 64 | : CameraSource(camera), |
| 65 | mUseStillCameraForTimeLapse(useStillCameraForTimeLapse), |
| 66 | mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs), |
| 67 | mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate), |
| 68 | mLastTimeLapseFrameRealTimestampUs(0), |
| 69 | mSkipCurrentFrame(false) { |
| 70 | |
| 71 | LOGV("starting time lapse mode"); |
| 72 | if(mUseStillCameraForTimeLapse) { |
| 73 | // Currently hardcoded the picture size. Will need to choose |
| 74 | // automatically or pass in from the app. |
| 75 | int32_t width, height; |
| 76 | width = 1024; |
| 77 | height = 768; |
| 78 | mMeta->setInt32(kKeyWidth, width); |
| 79 | mMeta->setInt32(kKeyHeight, height); |
| 80 | } |
| 81 | } |
| 82 | |
// Nothing to tear down here: the still-camera capture thread is joined
// in stopCameraRecording(), and the camera itself is owned by the base
// class / caller.
CameraSourceTimeLapse::~CameraSourceTimeLapse() {
}
| 85 | |
| 86 | // static |
| 87 | void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) { |
| 88 | CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); |
| 89 | source->threadTimeLapseEntry(); |
| 90 | return NULL; |
| 91 | } |
| 92 | |
| 93 | void CameraSourceTimeLapse::threadTimeLapseEntry() { |
| 94 | while(mStarted) { |
| 95 | LOGV("threadTimeLapseEntry loop"); |
| 96 | sleep(mTimeBetweenTimeLapseFrameCaptureUs/1E6); |
| 97 | CHECK_EQ(OK, mCamera->takePicture()); |
| 98 | } |
| 99 | } |
| 100 | |
| 101 | void CameraSourceTimeLapse::startCameraRecording() { |
| 102 | if(mUseStillCameraForTimeLapse) { |
| 103 | LOGV("start time lapse recording using still camera"); |
| 104 | |
| 105 | int32_t width; |
| 106 | int32_t height; |
| 107 | mMeta->findInt32(kKeyWidth, &width); |
| 108 | mMeta->findInt32(kKeyHeight, &height); |
| 109 | |
| 110 | int64_t token = IPCThreadState::self()->clearCallingIdentity(); |
| 111 | String8 s = mCamera->getParameters(); |
| 112 | IPCThreadState::self()->restoreCallingIdentity(token); |
| 113 | |
| 114 | CameraParameters params(s); |
| 115 | |
| 116 | params.setPictureSize(width, height); |
| 117 | mCamera->setParameters(params.flatten()); |
| 118 | |
| 119 | CHECK_EQ(OK, mCamera->takePicture()); |
| 120 | |
| 121 | // create a thread which takes pictures in a loop |
| 122 | pthread_attr_t attr; |
| 123 | pthread_attr_init(&attr); |
| 124 | pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); |
| 125 | |
| 126 | pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this); |
| 127 | pthread_attr_destroy(&attr); |
| 128 | } else { |
| 129 | LOGV("start time lapse recording using video camera"); |
| 130 | CHECK_EQ(OK, mCamera->startRecording()); |
| 131 | } |
| 132 | } |
| 133 | |
| 134 | void CameraSourceTimeLapse::stopCameraRecording() { |
| 135 | if(mUseStillCameraForTimeLapse) { |
| 136 | void *dummy; |
| 137 | pthread_join(mThreadTimeLapse, &dummy); |
| 138 | } else { |
| 139 | mCamera->stopRecording(); |
| 140 | } |
| 141 | } |
| 142 | |
| 143 | void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) { |
| 144 | if(!mUseStillCameraForTimeLapse) { |
| 145 | mCamera->releaseRecordingFrame(frame); |
| 146 | } |
| 147 | } |
| 148 | |
| 149 | sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) { |
| 150 | size_t source_size = source_data->size(); |
| 151 | void* source_pointer = source_data->pointer(); |
| 152 | |
| 153 | sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size); |
| 154 | sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size); |
| 155 | memcpy(newMemory->pointer(), source_pointer, source_size); |
| 156 | return newMemory; |
| 157 | } |
| 158 | |
| 159 | void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) { |
| 160 | if(msgType != CAMERA_MSG_RAW_IMAGE) { |
| 161 | return; |
| 162 | } |
| 163 | |
| 164 | LOGV("dataCallback for timelapse still frame"); |
| 165 | CHECK_EQ(true, mUseStillCameraForTimeLapse); |
| 166 | |
| 167 | int64_t timestampUs; |
| 168 | if (mNumFramesReceived == 0) { |
| 169 | timestampUs = mStartTimeUs; |
| 170 | } else { |
| 171 | timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; |
| 172 | } |
| 173 | sp<IMemory> dataCopy = createIMemoryCopy(data); |
| 174 | dataCallbackTimestamp(timestampUs, msgType, dataCopy); |
| 175 | } |
| 176 | |
| 177 | bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { |
| 178 | if(mSkipCurrentFrame) { |
| 179 | mSkipCurrentFrame = false; |
| 180 | return true; |
| 181 | } else { |
| 182 | return false; |
| 183 | } |
| 184 | } |
| 185 | |
// Video-camera mode frame filter. Given the real capture time of the
// incoming frame in *timestampUs, returns true if the frame should be
// dropped (not enough real time has passed since the last kept frame).
// When the frame is kept after a full capture interval, *timestampUs is
// rewritten so consecutive encoded frames are exactly one video-frame
// interval apart. In still-camera mode this is a no-op that always
// keeps the frame (timestamps are synthesized in dataCallback() instead).
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if(!mUseStillCameraForTimeLapse) {
        if(mLastTimeLapseFrameRealTimestampUs == 0) {
            // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
            // to current time (timestampUs) and save frame data.
            LOGV("dataCallbackTimestamp timelapse: initial frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        } else if (*timestampUs <
                (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
            // Skip all frames from last encoded frame until
            // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
            // Tell the camera to release its recording frame and return.
            LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
            return true;
        } else {
            // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
            // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
            // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
            //   of the last encoded frame's time stamp, so the output plays
            //   back at the requested video frame rate.
            LOGV("dataCallbackTimestamp timelapse: got timelapse frame");

            mLastTimeLapseFrameRealTimestampUs = *timestampUs;
            *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        }
    }
    return false;
}
| 214 | |
| 215 | void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, |
| 216 | const sp<IMemory> &data) { |
| 217 | if(!mUseStillCameraForTimeLapse) { |
| 218 | mSkipCurrentFrame = skipFrameAndModifyTimeStamp(×tampUs); |
| 219 | } |
| 220 | CameraSource::dataCallbackTimestamp(timestampUs, msgType, data); |
| 221 | } |
| 222 | |
| 223 | } // namespace android |